
Commit

Merge branch 'main' into aspinks_dh18145_s3writetimeout
AlSpinks committed Mar 3, 2025
2 parents d1c4d49 + fb6c6f4 commit 09544e2
Showing 184 changed files with 16,129 additions and 5,725 deletions.
1 change: 1 addition & 0 deletions Integrations/build.gradle
@@ -17,6 +17,7 @@ dependencies {
implementation project(':log-factory')
implementation project(":util-thread")
implementation libs.commons.lang3
implementation libs.google.findbugs.jsr305

testImplementation project(':engine-test-utils')
testImplementation project(path: ':Base', configuration: 'tests')
1 change: 0 additions & 1 deletion authentication/build.gradle
@@ -6,7 +6,6 @@ plugins {
description 'authentication: Deephaven authentication and identity'

dependencies {
api project(':proto:proto-backplane-grpc')
implementation project(':log-factory')
implementation project(':Configuration')

2 changes: 2 additions & 0 deletions authentication/example-providers/mtls/build.gradle
@@ -5,4 +5,6 @@

dependencies {
shadow project(':grpc-java:grpc-mtls')
shadow platform(libs.grpc.bom)
shadow libs.grpc.api
}
2 changes: 2 additions & 0 deletions authorization-codegen/build.gradle
@@ -14,4 +14,6 @@ dependencies {
implementation libs.grpc.services

implementation libs.squareup.javapoet

implementation libs.protobuf.java
}
@@ -34,14 +34,20 @@ PublishingTools.setupPublications(project) { publication ->
def dependenciesNode = root.appendNode('dependencies')

project.configurations.shadow.allDependencies.each {
if ((it instanceof ProjectDependency) || ! (it instanceof SelfResolvingDependency)) {
if ((it instanceof ProjectDependency)) {
def dependencyNode = dependenciesNode.appendNode('dependency')
dependencyNode.appendNode('groupId', it.group)
BasePluginConvention base = it.dependencyProject.convention.getPlugin(BasePluginConvention)

dependencyNode.appendNode('artifactId', base.archivesBaseName)
dependencyNode.appendNode('version', it.version)
dependencyNode.appendNode('scope', 'runtime')
} else if (! (it instanceof SelfResolvingDependency)) {
def dependencyNode = dependenciesNode.appendNode('dependency')
dependencyNode.appendNode('groupId', it.group)
dependencyNode.appendNode('artifactId', it.name)
dependencyNode.appendNode('version', it.version)
dependencyNode.appendNode('scope', 'runtime')
}
}
}
1 change: 1 addition & 0 deletions cpp-client/build.gradle
@@ -114,6 +114,7 @@ def testCppClient = Docker.registerDockerTask(project, 'testCppClient') {
environmentVariable 'DH_HOST', deephavenDocker.containerName.get()
environmentVariable 'DH_PORT', '10000'
}
waitTimeMinutes = 1
containerDependencies.dependsOn = [deephavenDocker.healthyTask]
containerDependencies.finalizedBy = deephavenDocker.endTask
network = deephavenDocker.networkName.get()
2 changes: 1 addition & 1 deletion cpp-client/deephaven/tests/src/time_unit_test.cc
@@ -72,7 +72,7 @@ TEST_CASE("Uploaded Arrow Timestamp units get normalized to nanos at FillChunk t
}
}

TEST_CASE("Uploaded Arrow Time64 units get normalized to nanos at FillChunk time", "[timeunit][.hidden]") {
TEST_CASE("Uploaded Arrow Time64 units get normalized to nanos at FillChunk time", "[timeunit]") {
auto tm = TableMakerForTests::Create();

std::vector<std::optional<InternalLocalTime<arrow::TimeUnit::MICRO>>> lt_micro;
@@ -400,6 +400,15 @@ public <Other> ColumnDefinition<Other> withDataType(@NotNull final Class<Other>
: fromGenericType(name, newDataType, componentType, columnType);
}

public <Other> ColumnDefinition<Other> withDataType(
@NotNull final Class<Other> newDataType,
@Nullable final Class<?> newComponentType) {
// noinspection unchecked
return dataType == newDataType && componentType == newComponentType
? (ColumnDefinition<Other>) this
: fromGenericType(name, newDataType, newComponentType, columnType);
}

public ColumnDefinition<?> withName(@NotNull final String newName) {
return newName.equals(name) ? this : new ColumnDefinition<>(newName, dataType, componentType, columnType);
}
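
A hedged usage sketch for the new two-argument withDataType overload above (not part of the commit): it re-types a definition while supplying the matching component type in one call, returning the same instance when both types already match and otherwise delegating to fromGenericType. The helper class and the long[]/long pairing are illustrative assumptions.

import io.deephaven.engine.table.ColumnDefinition;

public class WithDataTypeExample {
    public static ColumnDefinition<long[]> asLongArrayColumn(final ColumnDefinition<?> original) {
        // Illustrative only: equivalent to fromGenericType(name, long[].class, long.class, columnType)
        // unless the definition already carries exactly these types, in which case `original` is returned.
        return original.withDataType(long[].class, long.class);
    }
}
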
6 changes: 6 additions & 0 deletions engine/api/src/main/java/io/deephaven/engine/table/Table.java
@@ -223,6 +223,12 @@ public interface Table extends
* Set this attribute to enable collection of barrage performance stats.
*/
String BARRAGE_PERFORMANCE_KEY_ATTRIBUTE = "BarragePerformanceTableKey";
/**
* Set an Apache Arrow POJO Schema to this attribute to control the column encoding used for barrage serialization.
* <p>
* See {@code org.apache.arrow.vector.types.pojo.Schema}.
*/
String BARRAGE_SCHEMA_ATTRIBUTE = "BarrageSchema";

// -----------------------------------------------------------------------------------------------------------------
// ColumnSources for fetching data by row key
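
A hedged sketch of how a producer might use the new attribute (not part of the commit): attach an Arrow POJO Schema so barrage serialization can pick up the desired column encoding. Table.withAttributes and the Arrow classes come from existing APIs; the single-column layout and class name are illustrative assumptions.

import java.util.List;
import java.util.Map;

import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;

import io.deephaven.engine.table.Table;

public class BarrageSchemaExample {
    public static Table withWireSchema(final Table table) {
        // Hypothetical layout: a single nullable 32-bit signed integer column.
        final Schema schema = new Schema(List.of(
                Field.nullable("Value", new ArrowType.Int(32, true))));
        // "BarrageSchema" is the literal value of Table.BARRAGE_SCHEMA_ATTRIBUTE declared above.
        final Map<String, Object> attrs = Map.of(Table.BARRAGE_SCHEMA_ATTRIBUTE, schema);
        return table.withAttributes(attrs);
    }
}
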
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -74,6 +75,12 @@ public final boolean get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return false;
}
// endregion isNull

@Override
public BooleanChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/ByteChunk.java
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -78,6 +79,12 @@ public final byte get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_BYTE;
}
// endregion isNull

@Override
public ByteChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
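
The isNull methods added across the chunk classes in this commit follow one convention: each primitive chunk compares against its QueryConstants sentinel (NULL_BYTE, NULL_CHAR, NULL_DOUBLE, and so on), ObjectChunk compares against null, and BooleanChunk always returns false because the primitive boolean type reserves no sentinel. A standalone sketch of the byte case (illustrative, not part of the commit; the helper class and sample data are hypothetical):

import io.deephaven.util.QueryConstants;

public class NullSentinelExample {
    // Mirrors the per-index test ByteChunk.isNull performs: equality against NULL_BYTE.
    public static boolean isNullByte(final byte value) {
        return value == QueryConstants.NULL_BYTE;
    }

    public static void main(final String[] args) {
        final byte[] data = {1, QueryConstants.NULL_BYTE, 3};
        for (final byte b : data) {
            System.out.println(isNullByte(b) ? "null" : Byte.toString(b));
        }
    }
}
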
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/CharChunk.java
@@ -3,6 +3,7 @@
//
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -73,6 +74,12 @@ public final char get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_CHAR;
}
// endregion isNull

@Override
public CharChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final double get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_DOUBLE;
}
// endregion isNull

@Override
public DoubleChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/FloatChunk.java
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final float get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_FLOAT;
}
// endregion isNull

@Override
public FloatChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/IntChunk.java
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final int get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_INT;
}
// endregion isNull

@Override
public IntChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/LongChunk.java
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final long get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_LONG;
}
// endregion isNull

@Override
public LongChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final T get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == null;
}
// endregion isNull

@Override
public ObjectChunk<T, ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
@@ -10,7 +10,8 @@
* {@link Chunk} that may have its backing storage reset to a slice of that belonging to another {@link Chunk} or a
* native array.
*/
public interface ResettableReadOnlyChunk<ATTR_BASE extends Any> extends ResettableChunk<ATTR_BASE>, PoolableChunk {
public interface ResettableReadOnlyChunk<ATTR_BASE extends Any>
extends ResettableChunk<ATTR_BASE>, PoolableChunk<ATTR_BASE> {

/**
* Reset the data and bounds of this chunk to a range or sub-range of the specified {@link Chunk}.
@@ -11,7 +11,7 @@
* {@link WritableChunk} or a native array.
*/
public interface ResettableWritableChunk<ATTR_BASE extends Any>
extends ResettableChunk<ATTR_BASE>, WritableChunk<ATTR_BASE>, PoolableChunk {
extends ResettableChunk<ATTR_BASE>, WritableChunk<ATTR_BASE>, PoolableChunk<ATTR_BASE> {

@Override
<ATTR extends ATTR_BASE> WritableChunk<ATTR> resetFromChunk(WritableChunk<ATTR> other, int offset, int capacity);
7 changes: 7 additions & 0 deletions engine/chunk/src/main/java/io/deephaven/chunk/ShortChunk.java
@@ -7,6 +7,7 @@
// @formatter:off
package io.deephaven.chunk;

import io.deephaven.util.QueryConstants;
import io.deephaven.util.type.ArrayTypeUtils;
import io.deephaven.chunk.attributes.Any;

@@ -77,6 +78,12 @@ public final short get(int index) {
return data[offset + index];
}

// region isNull
public final boolean isNull(int index) {
return data[offset + index] == QueryConstants.NULL_SHORT;
}
// endregion isNull

@Override
public ShortChunk<ATTR> slice(int offset, int capacity) {
ChunkHelpers.checkSliceArgs(size, offset, capacity);
@@ -14,7 +14,7 @@
*
* @param <ATTR> Descriptive attribute that applies to the elements stored within this WritableChunk
*/
public interface WritableChunk<ATTR extends Any> extends Chunk<ATTR>, PoolableChunk {
public interface WritableChunk<ATTR extends Any> extends Chunk<ATTR>, PoolableChunk<ATTR> {
@Override
WritableChunk<ATTR> slice(int offset, int capacity);

@@ -37,7 +37,7 @@ public WritableChunk<T> get() {

/**
* Ensure the underlying chunk has a capacity of at least {@code capacity}.
*
* <p>
* The data and size of the returned chunk are undefined.
*
* @param capacity the minimum capacity for the chunk.
@@ -56,9 +56,9 @@ public WritableChunk<T> ensureCapacity(int capacity) {

/**
* Ensure the underlying chunk has a capacity of at least {@code capacity}.
*
* <p>
* If the chunk has existing data, then it is copied to the new chunk.
*
* <p>
* If the underlying chunk already exists, then the size of the chunk is the original size. If the chunk did not
* exist, then the size of the returned chunk is zero.
*
@@ -4,11 +4,12 @@
package io.deephaven.chunk.util.pools;

import io.deephaven.chunk.Chunk;
import io.deephaven.chunk.attributes.Any;
import io.deephaven.util.SafeCloseable;

/**
* Marker interface for {@link Chunk} subclasses that can be kept within a {@link ChunkPool}, and whose
* {@link #close()} method will return them to the appropriate pool.
*/
public interface PoolableChunk extends SafeCloseable {
public interface PoolableChunk<ATTR extends Any> extends Chunk<ATTR>, SafeCloseable {
}
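
Because writable chunks are PoolableChunk and therefore SafeCloseable, close() returns a pooled chunk to its pool; the type parameter added here additionally exposes the Chunk<ATTR> contract on the marker interface. A hedged usage sketch (not part of the commit; WritableIntChunk.makeWritableChunk, setSize, and the Values attribute are assumed from the existing chunk API, and the arithmetic is arbitrary):

import io.deephaven.chunk.WritableIntChunk;
import io.deephaven.chunk.attributes.Values;

public class PooledChunkUsage {
    public static int sumOfSquares(final int n) {
        try (final WritableIntChunk<Values> chunk = WritableIntChunk.makeWritableChunk(n)) {
            chunk.setSize(n); // make the logical size cover the slots written below
            for (int i = 0; i < n; i++) {
                chunk.set(i, i * i);
            }
            int sum = 0;
            for (int i = 0; i < chunk.size(); i++) {
                sum += chunk.get(i);
            }
            return sum;
        } // close() hands the chunk back to the pool it came from
    }
}
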
@@ -360,6 +360,15 @@ public enum CopyAttributeOperation {
CopyAttributeOperation.Flatten, // add flatten for now because web flattens all views
CopyAttributeOperation.Preview));

tempMap.put(BARRAGE_SCHEMA_ATTRIBUTE, EnumSet.of(
CopyAttributeOperation.Filter,
CopyAttributeOperation.FirstBy,
CopyAttributeOperation.Flatten,
CopyAttributeOperation.LastBy,
CopyAttributeOperation.PartitionBy,
CopyAttributeOperation.Reverse,
CopyAttributeOperation.Sort));

attributeToCopySet = Collections.unmodifiableMap(tempMap);
}
