diff --git a/build.gradle b/build.gradle
index efc786ed1a..39f49daf9d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -163,6 +163,15 @@ allprojects {
     test.dependsOn(":plugins:assemblePlugins")
 }
 
+def getCurrentBranch() {
+    def branch = ""
+    def proc = "git rev-parse --abbrev-ref HEAD".execute()
+    proc.in.eachLine { line -> branch = line }
+    proc.err.eachLine { line -> println line }
+    proc.waitFor()
+    branch
+}
+
 idea {
     project {
diff --git a/core/src/main/java/org/polypheny/db/adapter/Scannable.java b/core/src/main/java/org/polypheny/db/adapter/Scannable.java
index 3d56a6e91c..7793c2ada8 100644
--- a/core/src/main/java/org/polypheny/db/adapter/Scannable.java
+++ b/core/src/main/java/org/polypheny/db/adapter/Scannable.java
@@ -63,9 +63,9 @@ static PhysicalTable createSubstitutionTable( Scannable scannable, Context conte
         AllocationTable allocSubTable = new AllocationTable( builder.getNewAllocId(), allocation.placementId, allocation.partitionId, table.id, table.namespaceId, allocation.adapterId );
         List allocColumns = new ArrayList<>();
-        i = 1;
+
         for ( LogicalColumn column : columns ) {
-            AllocationColumn alloc = new AllocationColumn( logical.namespaceId, allocSubTable.placementId, allocSubTable.logicalId, column.id, PlacementType.AUTOMATIC, i++, allocation.adapterId );
+            AllocationColumn alloc = new AllocationColumn( logical.namespaceId, allocSubTable.placementId, allocSubTable.logicalId, column.id, PlacementType.AUTOMATIC, column.position, allocation.adapterId );
             allocColumns.add( alloc );
         }
         // we use first as pk
diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableScan.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableScan.java
index d2ae9520d0..ed5a4c8794 100644
--- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableScan.java
+++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableScan.java
@@ -38,6 +38,7 @@ import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
 import org.polypheny.db.catalog.entity.CatalogEntity;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.interpreter.Row;
@@ -255,7 +256,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) {
 
     @Override
     public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) {
-        if ( Catalog.testMode ) {
+        if ( Catalog.mode == PolyphenyMode.TEST ) {
             // normally this enumerable is not used by Polypheny and is therefore "removed" by an infinite cost,
             // but theoretically it is able to handle scans on the application layer
             // this is tested by different instances and should then lead to a finite selfCost
diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
index affc201142..797b2e54a7 100644
--- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -68,7 +68,7 @@ public abstract class Catalog implements ExtensionPoint {
     private static Catalog INSTANCE = null;
     public static boolean resetCatalog;
     public static boolean memoryCatalog;
-    public static boolean testMode;
+    public static PolyphenyMode mode;
 
     public static final Expression CATALOG_EXPRESSION = Expressions.call( Catalog.class, "getInstance" );
@@ -275,4 +275,11 @@ public static Snapshot snapshot() {
     public abstract void restore();
 
+    public enum PolyphenyMode {
+        DEFAULT,
+        TEST,
+        BENCHMARK
+    }
+
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java
index b9ce2ed877..6701f34624 100644
--- a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java
@@ -17,6 +17,7 @@ package org.polypheny.db.catalog.catalogs;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Streams;
 import io.activej.serializer.BinarySerializer;
 import io.activej.serializer.annotations.Deserialize;
 import java.util.ArrayList;
@@ -105,7 +106,7 @@ public PhysicalColumn getColumn( long id, long allocId ) {
     public PhysicalTable createTable( String namespaceName, String tableName, Map columnNames, LogicalTable logical, Map lColumns, AllocationTableWrapper wrapper ) {
         AllocationTable allocation = wrapper.table;
         List columns = wrapper.columns;
-        List pColumns = columns.stream().map( c -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, c.position, lColumns.get( c.columnId ) ) ).collect( Collectors.toList() );
+        List pColumns = Streams.mapWithIndex( columns.stream(), ( c, i ) -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, (int) i, lColumns.get( c.columnId ) ) ).collect( Collectors.toList() );
         PhysicalTable table = new PhysicalTable( IdBuilder.getInstance().getNewPhysicalId(), allocation.id, allocation.logicalId, tableName, pColumns, logical.namespaceId, namespaceName, allocation.adapterId );
         pColumns.forEach( this::addColumn );
         addPhysical( allocation, table );
@@ -117,7 +118,7 @@ public PhysicalColumn addColumn( String name, long allocId, int position, Logica
         PhysicalColumn column = new PhysicalColumn( name, lColumn.tableId, allocId, adapterId, position, lColumn );
         PhysicalTable table = fromAllocation( allocId );
         List columns = new ArrayList<>( table.columns );
-        columns.add( position - 1, column );
+        columns.add( position, column );
         addColumn( column );
         addPhysical( getAlloc( table.allocationId ), table.toBuilder().columns( ImmutableList.copyOf( columns ) ).build() );
         return column;
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
index daa9ca2576..81ead395a2 100644
--- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
+++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -196,7 +196,7 @@ public static DdlManager getInstance() {
      * @param dataStore the data store on which to create the placement
      * @param statement the query statement
      */
-    public abstract void addDataPlacement( LogicalTable table, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement );
+    public abstract void createAllocationPlacement( LogicalTable table, List columnIds, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement );
 
     /**
      * Adds a new primary key to a table
diff --git a/core/src/main/java/org/polypheny/db/docker/DockerContainer.java b/core/src/main/java/org/polypheny/db/docker/DockerContainer.java
index 6ad82ff94b..6776acb979 100644
--- a/core/src/main/java/org/polypheny/db/docker/DockerContainer.java
+++ b/core/src/main/java/org/polypheny/db/docker/DockerContainer.java
@@ -39,6 +39,7 @@ import org.bouncycastle.tls.TlsFatalAlert;
 import org.bouncycastle.tls.TlsNoCloseNotifyException;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
 import org.polypheny.db.config.RuntimeConfig;
 
 /**
@@ -125,11 +126,11 @@ public int execute( List cmd ) throws IOException {
     public static String getPhysicalUniqueName( String uniqueName ) {
         // while not all Docker containers belong to an adapter we annotate it anyway
         String name = "polypheny_" + RuntimeConfig.INSTANCE_UUID.getString() + "_" + uniqueName;
-        if ( !Catalog.testMode ) {
+        if ( Catalog.mode != PolyphenyMode.TEST ) {
             return name;
-        } else {
-            return name + "_test";
         }
+        return name + "_test";
+
     }
diff --git a/core/src/main/java/org/polypheny/db/docker/DockerManager.java b/core/src/main/java/org/polypheny/db/docker/DockerManager.java
index 6067d2d490..b5c1002dc2 100644
--- a/core/src/main/java/org/polypheny/db/docker/DockerManager.java
+++ b/core/src/main/java/org/polypheny/db/docker/DockerManager.java
@@ -27,6 +27,7 @@ import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
 import org.polypheny.db.config.Config.ConfigListener;
 import org.polypheny.db.config.ConfigDocker;
 import org.polypheny.db.config.ConfigManager;
@@ -51,7 +52,7 @@ public static DockerManager getInstance() {
 
     public Optional getInstanceById( int instanceId ) {
         // Tests expect a localhost docker instance with id 0
-        if ( Catalog.testMode && instanceId == 0 ) {
+        if ( Catalog.mode == PolyphenyMode.TEST && instanceId == 0 ) {
             return dockerInstances.values().stream().filter( d -> d.getHost().equals( "localhost" ) ).findFirst();
         }
         return Optional.ofNullable( dockerInstances.get( instanceId ) );
diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java
index 566becf85b..5ee99ef4ed 100644
--- a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java
+++ b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java
@@ -1393,10 +1393,14 @@ private AlgSubset registerImpl( AlgNode alg, AlgSet set ) {
         //log.warn( "size is: " + provenanceMap.size() );
         // Record its provenance. (Rule call may be null.)
         if ( ruleCallStack.isEmpty() ) {
-            //provenanceMap.put( alg, Provenance.EMPTY );
+            if ( LOGGER.isDebugEnabled() ) {
+                provenanceMap.put( alg, Provenance.EMPTY );
+            }
         } else {
             final VolcanoRuleCall ruleCall = ruleCallStack.peek();
-            //provenanceMap.put( alg, new RuleProvenance( ruleCall.rule, ImmutableList.copyOf( ruleCall.algs ), ruleCall.id ) );
+            if ( LOGGER.isDebugEnabled() ) {
+                provenanceMap.put( alg, new RuleProvenance( ruleCall.rule, ImmutableList.copyOf( ruleCall.algs ), ruleCall.id ) );
+            }
         }
 
         // If it is equivalent to an existing expression, return the set that the equivalent expression belongs to.
diff --git a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
index 3b195731a6..28ac057ab8 100644
--- a/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
+++ b/dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
@@ -42,10 +42,12 @@ import org.polypheny.db.adapter.index.IndexManager;
 import org.polypheny.db.adapter.java.AdapterTemplate;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
 import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
 import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.catalog.impl.PolyCatalog;
 import org.polypheny.db.catalog.logistic.NamespaceType;
+import org.polypheny.db.cli.PolyphenyModesConverter;
 import org.polypheny.db.config.ConfigManager;
 import org.polypheny.db.config.RuntimeConfig;
 import org.polypheny.db.ddl.DdlManager;
@@ -111,8 +113,8 @@ public class PolyphenyDb {
     @Option(name = { "-memoryCatalog" }, description = "Store catalog only in-memory")
     public boolean memoryCatalog = false;
 
-    @Option(name = { "-testMode" }, description = "Special catalog configuration for running tests")
-    public boolean testMode = false;
+    @Option(name = { "-mode" }, description = "Special system configuration for running tests", typeConverterProvider = PolyphenyModesConverter.class)
+    public PolyphenyMode mode = PolyphenyMode.DEFAULT;
 
     @Option(name = { "-gui" }, description = "Show splash screen on startup and add taskbar gui")
     public boolean desktopMode = false;
@@ -181,7 +183,7 @@ public void runPolyphenyDb() {
         }
 
         // Configuration shall not be persisted
-        ConfigManager.memoryMode = (testMode || memoryCatalog);
+        ConfigManager.memoryMode = (mode == PolyphenyMode.TEST || memoryCatalog);
         ConfigManager.resetCatalogOnStartup = resetCatalog;
 
         // Select behavior depending on arguments
@@ -259,7 +261,7 @@ public void runPolyphenyDb() {
         }
 
         // Backup content of Polypheny folder
-        if ( testMode || memoryCatalog ) {
+        if ( mode == PolyphenyMode.TEST || memoryCatalog ) {
             if ( phdm.checkIfExists( "_test_backup" ) ) {
                 throw new GenericRuntimeException( "Unable to backup the Polypheny folder since there is already a backup folder." );
             }
@@ -305,7 +307,7 @@ public void runPolyphenyDb() {
             }
         }
 
-        if ( testMode ) {
+        if ( mode == PolyphenyMode.TEST ) {
             uuid = "polypheny-test";
         }
 
@@ -372,7 +374,6 @@ public void join( final long millis ) throws InterruptedException {
         new ConfigService( server.getServer() );
         new InformationService( server.getServer() );
 
-
         try {
             new JavaInformation();
         } catch ( Exception e ) {
@@ -385,12 +386,12 @@ public void join( final long millis ) throws InterruptedException {
         }
 
         if ( AutoDocker.getInstance().isAvailable() ) {
-            if ( testMode ) {
+            if ( mode == PolyphenyMode.TEST ) {
                 resetDocker = true;
                 Catalog.resetDocker = true;
             }
             boolean success = AutoDocker.getInstance().doAutoConnect();
-            if ( testMode && !success ) {
+            if ( mode == PolyphenyMode.TEST && !success ) {
                 // AutoDocker does not work in Windows containers
                 if ( !System.getenv( "RUNNER_OS" ).equals( "Windows" ) ) {
                     log.error( "Failed to connect to docker instance" );
@@ -443,7 +444,7 @@ public void join( final long millis ) throws InterruptedException {
         DdlManager.setAndGetInstance( new DdlManagerImpl( catalog ) );
 
         // Add config and monitoring test page for UI testing
-        if ( testMode ) {
+        if ( mode == PolyphenyMode.TEST ) {
             new UiTestingConfigPage();
             new UiTestingMonitoringPage();
         }
@@ -521,7 +522,7 @@ private HttpServer startHttpServer( Authenticator authenticator, TransactionMana
     private Catalog startCatalog() {
         Catalog.resetCatalog = resetCatalog;
         Catalog.memoryCatalog = memoryCatalog;
-        Catalog.testMode = testMode;
+        Catalog.mode = mode;
         Catalog.resetDocker = resetDocker;
         Catalog catalog = Catalog.setAndGetInstance( new PolyCatalog() );
         if ( catalog == null ) {
@@ -535,7 +536,7 @@ private Catalog startCatalog() {
     private void restore( Authenticator authenticator, Catalog catalog ) {
         PolyPluginManager.startUp( transactionManager, authenticator );
 
-        if ( !resetCatalog && !testMode ) {
+        if ( !resetCatalog && mode != PolyphenyMode.TEST ) {
             Catalog.getInstance().restore();
         }
         Catalog.getInstance().updateSnapshot();
diff --git a/dbms/src/main/java/org/polypheny/db/cli/PolyphenyModesConverter.java b/dbms/src/main/java/org/polypheny/db/cli/PolyphenyModesConverter.java
new file mode 100644
index 0000000000..7b702621b6
--- /dev/null
+++ b/dbms/src/main/java/org/polypheny/db/cli/PolyphenyModesConverter.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.cli;
+
+import com.github.rvesse.airline.types.DefaultTypeConverter;
+import java.util.Arrays;
+import lombok.extern.slf4j.Slf4j;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
+
+@Slf4j
+public class PolyphenyModesConverter extends DefaultTypeConverter {
+
+    @Override
+    public Object convert( String name, Class type, String value ) {
+        String adjustedName = name.toUpperCase();
+
+        if ( Arrays.stream( PolyphenyMode.values() ).anyMatch( v -> v.name().equals( adjustedName ) ) ) {
+            return PolyphenyMode.valueOf( adjustedName );
+        }
+
+        switch ( adjustedName ) {
+            case "T":
+                return PolyphenyMode.TEST;
+            case "B":
+                return PolyphenyMode.BENCHMARK;
+            case "D":
+                return PolyphenyMode.DEFAULT;
+        }
+        log.warn( "Could not find the mode: " + adjustedName );
+        return PolyphenyMode.DEFAULT;
+    }
+
+}
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index d6faec3eb0..9395ff1535 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -710,7 +710,7 @@ public void createPolyphenyIndex( LogicalTable table, String indexMethodName, Li
 
     @Override
-    public void addDataPlacement( LogicalTable table, List newColumns, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) {
+    public void createAllocationPlacement( LogicalTable table, List newColumns, List partitionGroupIds, List partitionGroupNames, DataStore dataStore, Statement statement ) {
         // check if allocation already exists
         if ( catalog.getSnapshot().alloc().getPlacement( dataStore.getAdapterId(), table.id ).isPresent() ) {
@@ -732,7 +732,7 @@ public void addDataPlacement( LogicalTable table, List newColumns
         AllocationPlacement placement = catalog.getAllocRel( table.namespaceId ).addPlacement( table.id, table.namespaceId, dataStore.adapterId );
         PartitionProperty property = catalog.getSnapshot().alloc().getPartitionProperty( table.id ).orElseThrow();
 
-        addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, property.partitionIds, primaryKey.columnIds, dataStore );
+        addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, primaryKey.columnIds, property.partitionIds, dataStore );
 
         Catalog.getInstance().updateSnapshot();
@@ -1817,7 +1817,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
         for ( DataStore store : stores ) {
             AllocationPlacement placement = catalog.getAllocRel( namespaceId ).addPlacement( view.id, namespaceId, store.adapterId );
-            addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of( partition.id ), List.of(), store );
+            addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of(), List.of( partition.id ), store );
         }
 
         addBlankPartition( namespaceId, view.id, List.of( group.id ), List.of( partition.id ) );
@@ -2133,8 +2133,9 @@ private Pair createSinglePartition( long
 
     private List addAllocationsForPlacement( long namespaceId, Statement statement, LogicalTable logical, long placementId, List lColumns, List pkIds, List partitionIds, Adapter adapter ) {
         List columns = new ArrayList<>();
-        for ( LogicalColumn column : lColumns ) {
-            columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, column.position ) );
+        int i = 0;
+        for ( LogicalColumn column : sortByPosition( lColumns ) ) {
+            columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) );
         }
 
         buildNamespace( namespaceId, logical, adapter );
@@ -2146,7 +2147,7 @@ private List addAllocationsForPlacement( long namespaceId, Stat
     }
 
-    private PartitionProperty addBlankPartition( long namespaceId, long logicalEntityId, List groupIds, List allocIds ) {
+    private PartitionProperty addBlankPartition( long namespaceId, long logicalEntityId, List groupIds, List partitionIds ) {
         //LogicalPartitionGroup defaultUnpartitionedGroup = catalog.getAllocRel( namespaceId ).addPartitionGroup( logicalEntityId, "full", namespaceId, PartitionType.NONE, 1, List.of(), true );
 
         PartitionProperty partitionProperty = PartitionProperty.builder()
@@ -2154,7 +2155,7 @@ private PartitionProperty addBlankPartition( long namespaceId, long logicalEntit
                 .partitionType( PartitionType.NONE )
                 .isPartitioned( false )
                 .partitionGroupIds( ImmutableList.copyOf( groupIds ) )
-                .partitionIds( ImmutableList.copyOf( allocIds ) )
+                .partitionIds( ImmutableList.copyOf( partitionIds ) )
                 .reliesOnPeriodicChecks( false )
                 .build();
@@ -2166,7 +2167,7 @@ private PartitionProperty addBlankPartition( long namespaceId, long logicalEntit
     private AllocationTable addAllocationTable( long namespaceId, Statement statement, LogicalTable logical, List lColumns, List pkIds, long placementId, long partitionId, List aColumns, Adapter adapter ) {
         AllocationTable alloc = catalog.getAllocRel( namespaceId ).addAllocation( adapter.adapterId, placementId, partitionId, logical.id );
 
-        adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ), pkIds ), AllocationTableWrapper.of( alloc, sortByPositionAlloc( aColumns ) ) );
+        adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ), pkIds ), AllocationTableWrapper.of( alloc, aColumns ) );
         return alloc;
     }
@@ -2177,12 +2178,6 @@ private static List sortByPosition( List columns )
     }
 
-    @NotNull
-    private static List sortByPositionAlloc( List columns ) {
-        return columns.stream().sorted( Comparator.comparingInt( a -> a.position ) ).collect( Collectors.toList() );
-    }
-
-
     private void buildNamespace( long namespaceId, LogicalTable logical, Adapter store ) {
         store.updateNamespace( logical.getNamespaceName(), namespaceId );
     }
@@ -2673,6 +2668,7 @@ private Pair, PartitionProperty> addGroupsAndPartition
         return Pair.of( partitions, partitionProperty );
     }
 
+    @Override
     public void dropTablePartition( LogicalTable table, Statement statement ) throws TransactionException {
         long tableId = table.id;
diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
index dae6dd9fea..6f1e1112df 100644
--- a/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
+++ b/dbms/src/main/java/org/polypheny/db/routing/routers/AbstractDqlRouter.java
@@ -41,6 +41,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalValues;
 import org.polypheny.db.catalog.entity.logical.LogicalEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalTable;
+import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.catalog.logistic.NamespaceType;
 import org.polypheny.db.plan.AlgOptCluster;
 import org.polypheny.db.rex.RexBuilder;
@@ -191,7 +192,7 @@ protected List buildSelect( AlgNode node, List {
-        polyphenyDb.testMode = true;
+        polyphenyDb.mode = PolyphenyMode.TEST;
         String defaultStoreName = System.getProperty( "storeId.default" );
         if ( defaultStoreName != null ) {
             polyphenyDb.defaultStoreName = defaultStoreName;
diff --git a/gradle.properties b/gradle.properties
index b5d266c402..a3afe3baa9 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -25,7 +25,7 @@ org.gradle.jvmargs = -Xmx6g -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-
 
 # Dependency versions
 activej_serializer_version = 5.5
-airline_version = 2.8.0
+airline_version = 2.9.0
 asm_version = 9.4
 avatica_core_version = 1.17.2-POLYPHENY
 avatica_server_version = 1.17.2-POLYPHENY
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
index 24d8ac95f9..d36cca1f77 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
@@ -92,7 +92,7 @@ public CsvTable createCsvTable( long id, PhysicalTable table, CsvSource csvSourc
         for ( PhysicalColumn column : table.getColumns() ) {
             AlgDataType sqlType = sqlType( typeFactory, column.type, column.length, column.scale, null );
-            fieldInfo.add( column.id, column.name, columns.get( column.position - 1 ).physicalColumnName, sqlType ).nullable( column.nullable );
+            fieldInfo.add( column.id, column.name, columns.get( column.position ).physicalColumnName, sqlType ).nullable( column.nullable );
             fieldTypes.add( CsvFieldType.getCsvFieldType( column.type ) );
             fieldIds.add( column.position );
         }
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
index 1ff125f6dd..a511e07357 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
@@ -18,6 +18,7 @@
 
 import java.sql.SQLException;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -207,7 +208,8 @@ protected StringBuilder buildCreateTableQuery( PhysicalTable table ) {
     public void addColumn( Context context, long allocId, LogicalColumn logicalColumn ) {
         String physicalColumnName = getPhysicalColumnName( logicalColumn.id );
         PhysicalTable table = storeCatalog.fromAllocation( allocId );
-        PhysicalColumn column = storeCatalog.addColumn( physicalColumnName, allocId, table.columns.size(), logicalColumn );
+        int max = storeCatalog.getColumns( allocId ).stream().max( Comparator.comparingInt( a -> a.position ) ).orElseThrow().position;
+        PhysicalColumn column = storeCatalog.addColumn( physicalColumnName, allocId, max + 1, logicalColumn );
 
         StringBuilder query = buildAddColumnQuery( table, column );
         executeUpdate( query, context );
diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
index 2d46509e64..1b65fa72b4 100644
--- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
+++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterTableAddPlacement.java
@@ -141,7 +141,7 @@ public void execute( Context context, Statement statement, QueryParameters param
             columns.addAll( statement.getTransaction().getSnapshot().rel().getColumns( table.id ) );
         }
 
-        DdlManager.getInstance().addDataPlacement(
+        DdlManager.getInstance().createAllocationPlacement(
                 table,
                 columns,
                 partitionGroupsList,
diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java
index e7f48e8f38..58bedc2e9c 100644
--- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java
+++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/PlannerTest.java
@@ -124,7 +124,7 @@ public class PlannerTest extends SqlLanguageDependent {
 
     static {
-        Catalog.testMode = true;
+        Catalog.mode = Catalog.PolyphenyMode.TEST;
     }
 
diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java
index b8d822d052..5e6aa51d6f 100644
--- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java
+++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/util/PlannerImplMock.java
@@ -70,7 +70,7 @@ public class PlannerImplMock implements Planner {
 
     static {
-        Catalog.testMode = true;
+        Catalog.mode = Catalog.PolyphenyMode.TEST;
     }
 
diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
index e415f29b04..2425964028 100644
--- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java
@@ -37,6 +37,7 @@ import org.polypheny.db.algebra.AlgRoot;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.Catalog.PolyphenyMode;
 import org.polypheny.db.catalog.entity.CatalogDataPlacement;
 import org.polypheny.db.catalog.entity.allocation.AllocationEntity;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
@@ -321,7 +322,7 @@ private static DocResult getDocResult( Statement statement, QueryLanguage langua
     private static String toJson( @Nullable PolyValue src ) {
         return src == null ? null
-                : Catalog.testMode ? src.toTypedJson() : src.toJson();
+                : Catalog.mode == PolyphenyMode.TEST ? src.toTypedJson() : src.toJson();
     }