fixing vertical partitioning, partial fix horizontal
datomo committed Nov 1, 2023
1 parent: 121e262 · commit: d217d5e
Showing 21 changed files with 120 additions and 47 deletions.
9 changes: 9 additions & 0 deletions build.gradle
@@ -163,6 +163,15 @@ allprojects {
test.dependsOn(":plugins:assemblePlugins")
}

+ def getCurrentBranch() {
+ def branch = ""
+ def proc = "git rev-parse --abbrev-ref HEAD".execute()
+ proc.in.eachLine { line -> branch = line }
+ proc.err.eachLine { line -> println line }
+ proc.waitFor()
+ branch
+ }


idea {
project {
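The new getCurrentBranch() helper shells out to git rev-parse --abbrev-ref HEAD, keeps the last line of its output as the branch name, and echoes anything printed on stderr. A rough Java sketch of the same idea (illustration only; the build script itself is Groovy, and this class is not part of the commit):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class CurrentBranch {

    // Returns the current git branch name, or an empty string if nothing was printed.
    static String getCurrentBranch() throws IOException, InterruptedException {
        Process proc = new ProcessBuilder( "git", "rev-parse", "--abbrev-ref", "HEAD" ).start();
        String branch = "";
        try ( BufferedReader reader = new BufferedReader( new InputStreamReader( proc.getInputStream() ) ) ) {
            String line;
            while ( (line = reader.readLine()) != null ) {
                branch = line; // keep the last line, like the Gradle helper
            }
        }
        proc.waitFor();
        return branch;
    }

    public static void main( String[] args ) throws Exception {
        System.out.println( getCurrentBranch() );
    }

}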
4 changes: 2 additions & 2 deletions core/src/main/java/org/polypheny/db/adapter/Scannable.java
@@ -63,9 +63,9 @@ static PhysicalTable createSubstitutionTable( Scannable scannable, Context conte
AllocationTable allocSubTable = new AllocationTable( builder.getNewAllocId(), allocation.placementId, allocation.partitionId, table.id, table.namespaceId, allocation.adapterId );

List<AllocationColumn> allocColumns = new ArrayList<>();
- i = 1;

for ( LogicalColumn column : columns ) {
- AllocationColumn alloc = new AllocationColumn( logical.namespaceId, allocSubTable.placementId, allocSubTable.logicalId, column.id, PlacementType.AUTOMATIC, i++, allocation.adapterId );
+ AllocationColumn alloc = new AllocationColumn( logical.namespaceId, allocSubTable.placementId, allocSubTable.logicalId, column.id, PlacementType.AUTOMATIC, column.position, allocation.adapterId );
allocColumns.add( alloc );
}
// we use first as pk
@@ -38,6 +38,7 @@
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.algebra.type.AlgDataTypeField;
import org.polypheny.db.catalog.Catalog;
+ import org.polypheny.db.catalog.Catalog.PolyphenyMode;
import org.polypheny.db.catalog.entity.CatalogEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalTable;
import org.polypheny.db.interpreter.Row;
@@ -255,7 +256,7 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) {

@Override
public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) {
- if ( Catalog.testMode ) {
+ if ( Catalog.mode == PolyphenyMode.TEST ) {
// normally this enumerable is not used by Polypheny and is therefore "removed" by an infinite cost,
// but theoretically it is able to handle scans on the application layer
// this is tested by different instances and should then lead to a finite selfCost
9 changes: 8 additions & 1 deletion core/src/main/java/org/polypheny/db/catalog/Catalog.java
@@ -68,7 +68,7 @@ public abstract class Catalog implements ExtensionPoint {
private static Catalog INSTANCE = null;
public static boolean resetCatalog;
public static boolean memoryCatalog;
- public static boolean testMode;
+ public static PolyphenyMode mode;

public static final Expression CATALOG_EXPRESSION = Expressions.call( Catalog.class, "getInstance" );

@@ -275,4 +275,11 @@ public static Snapshot snapshot() {
public abstract void restore();


+ public enum PolyphenyMode {
+ DEFAULT,
+ TEST,
+ BENCHMARK
+ }


}
@@ -17,6 +17,7 @@
package org.polypheny.db.catalog.catalogs;

import com.google.common.collect.ImmutableList;
+ import com.google.common.collect.Streams;
import io.activej.serializer.BinarySerializer;
import io.activej.serializer.annotations.Deserialize;
import java.util.ArrayList;
@@ -105,7 +106,7 @@ public PhysicalColumn getColumn( long id, long allocId ) {
public PhysicalTable createTable( String namespaceName, String tableName, Map<Long, String> columnNames, LogicalTable logical, Map<Long, LogicalColumn> lColumns, AllocationTableWrapper wrapper ) {
AllocationTable allocation = wrapper.table;
List<AllocationColumn> columns = wrapper.columns;
- List<PhysicalColumn> pColumns = columns.stream().map( c -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, c.position, lColumns.get( c.columnId ) ) ).collect( Collectors.toList() );
+ List<PhysicalColumn> pColumns = Streams.mapWithIndex( columns.stream(), ( c, i ) -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, (int) i, lColumns.get( c.columnId ) ) ).collect( Collectors.toList() );
PhysicalTable table = new PhysicalTable( IdBuilder.getInstance().getNewPhysicalId(), allocation.id, allocation.logicalId, tableName, pColumns, logical.namespaceId, namespaceName, allocation.adapterId );
pColumns.forEach( this::addColumn );
addPhysical( allocation, table );
@@ -117,7 +118,7 @@ public PhysicalColumn addColumn( String name, long allocId, int position, Logica
PhysicalColumn column = new PhysicalColumn( name, lColumn.tableId, allocId, adapterId, position, lColumn );
PhysicalTable table = fromAllocation( allocId );
List<PhysicalColumn> columns = new ArrayList<>( table.columns );
- columns.add( position - 1, column );
+ columns.add( position, column );
addColumn( column );
addPhysical( getAlloc( table.allocationId ), table.toBuilder().columns( ImmutableList.copyOf( columns ) ).build() );
return column;
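createTable now derives each physical column's position from the stream index via Guava's Streams.mapWithIndex instead of copying the allocation column's own position, so the physical layout stays dense and zero-based even when the source positions have gaps. A minimal, self-contained sketch of that indexing idiom, using plain strings rather than Polypheny's catalog types (assumed: Guava 21+ on the classpath):

import com.google.common.collect.Streams;
import java.util.List;
import java.util.stream.Collectors;

public class MapWithIndexSketch {

    public static void main( String[] args ) {
        // Source positions may have gaps (e.g. after a dropped column),
        // but the physical layout should still be dense and zero-based.
        List<String> columns = List.of( "id", "name", "age" );

        List<String> physical = Streams.mapWithIndex(
                columns.stream(),
                ( column, index ) -> index + ":" + column // index is a long starting at 0
        ).collect( Collectors.toList() );

        System.out.println( physical ); // [0:id, 1:name, 2:age]
    }

}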
2 changes: 1 addition & 1 deletion core/src/main/java/org/polypheny/db/ddl/DdlManager.java
@@ -196,7 +196,7 @@ public static DdlManager getInstance() {
* @param dataStore the data store on which to create the placement
* @param statement the query statement
*/
- public abstract void addDataPlacement( LogicalTable table, List<LogicalColumn> columnIds, List<Integer> partitionGroupIds, List<String> partitionGroupNames, DataStore<?> dataStore, Statement statement );
+ public abstract void createAllocationPlacement( LogicalTable table, List<LogicalColumn> columnIds, List<Integer> partitionGroupIds, List<String> partitionGroupNames, DataStore<?> dataStore, Statement statement );

/**
* Adds a new primary key to a table
@@ -39,6 +39,7 @@
import org.bouncycastle.tls.TlsFatalAlert;
import org.bouncycastle.tls.TlsNoCloseNotifyException;
import org.polypheny.db.catalog.Catalog;
+ import org.polypheny.db.catalog.Catalog.PolyphenyMode;
import org.polypheny.db.config.RuntimeConfig;

/**
@@ -125,11 +126,11 @@ public int execute( List<String> cmd ) throws IOException {
public static String getPhysicalUniqueName( String uniqueName ) {
// while not all Docker containers belong to an adapter we annotate it anyway
String name = "polypheny_" + RuntimeConfig.INSTANCE_UUID.getString() + "_" + uniqueName;
- if ( !Catalog.testMode ) {
+ if ( Catalog.mode != PolyphenyMode.TEST ) {
return name;
- } else {
- return name + "_test";
}
+ return name + "_test";

}


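The simplified getPhysicalUniqueName keeps the naming scheme from the surrounding code: polypheny_<instance uuid>_<unique name>, with a _test suffix appended only in test mode. A small standalone sketch of that logic (the UUID and adapter name below are placeholders, and the Mode enum stands in for Catalog.PolyphenyMode):

public class ContainerNames {

    enum Mode { DEFAULT, TEST, BENCHMARK }

    // Mirrors the scheme: "polypheny_<instance uuid>_<unique name>", plus "_test" in test mode.
    static String getPhysicalUniqueName( String instanceUuid, String uniqueName, Mode mode ) {
        String name = "polypheny_" + instanceUuid + "_" + uniqueName;
        if ( mode != Mode.TEST ) {
            return name;
        }
        return name + "_test";
    }

    public static void main( String[] args ) {
        System.out.println( getPhysicalUniqueName( "1234-abcd", "postgres", Mode.DEFAULT ) ); // polypheny_1234-abcd_postgres
        System.out.println( getPhysicalUniqueName( "1234-abcd", "postgres", Mode.TEST ) );    // polypheny_1234-abcd_postgres_test
    }

}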
@@ -27,6 +27,7 @@
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.polypheny.db.catalog.Catalog;
+ import org.polypheny.db.catalog.Catalog.PolyphenyMode;
import org.polypheny.db.config.Config.ConfigListener;
import org.polypheny.db.config.ConfigDocker;
import org.polypheny.db.config.ConfigManager;
@@ -51,7 +52,7 @@ public static DockerManager getInstance() {

public Optional<DockerInstance> getInstanceById( int instanceId ) {
// Tests expect a localhost docker instance with id 0
- if ( Catalog.testMode && instanceId == 0 ) {
+ if ( Catalog.mode == PolyphenyMode.TEST && instanceId == 0 ) {
return dockerInstances.values().stream().filter( d -> d.getHost().equals( "localhost" ) ).findFirst();
}
return Optional.ofNullable( dockerInstances.get( instanceId ) );
@@ -1393,10 +1393,14 @@ private AlgSubset registerImpl( AlgNode alg, AlgSet set ) {
//log.warn( "size is: " + provenanceMap.size() );
// Record its provenance. (Rule call may be null.)
if ( ruleCallStack.isEmpty() ) {
- //provenanceMap.put( alg, Provenance.EMPTY );
+ if ( LOGGER.isDebugEnabled() ) {
+ provenanceMap.put( alg, Provenance.EMPTY );
+ }
} else {
final VolcanoRuleCall ruleCall = ruleCallStack.peek();
- //provenanceMap.put( alg, new RuleProvenance( ruleCall.rule, ImmutableList.copyOf( ruleCall.algs ), ruleCall.id ) );
+ if ( LOGGER.isDebugEnabled() ) {
+ provenanceMap.put( alg, new RuleProvenance( ruleCall.rule, ImmutableList.copyOf( ruleCall.algs ), ruleCall.id ) );
+ }
}

// If it is equivalent to an existing expression, return the set that the equivalent expression belongs to.
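registerImpl previously kept the provenance bookkeeping commented out; it now records provenance only when debug logging is enabled, so the planner's hot path skips the map insertions in normal operation. A generic sketch of that guard pattern with SLF4J (HashMap and the string payload are stand-ins for the planner's provenance types):

import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DebugOnlyBookkeeping {

    private static final Logger LOGGER = LoggerFactory.getLogger( DebugOnlyBookkeeping.class );

    private final Map<Object, String> provenance = new HashMap<>();

    void register( Object node, String origin ) {
        // Only pay for the bookkeeping when someone can actually read it.
        if ( LOGGER.isDebugEnabled() ) {
            provenance.put( node, origin );
        }
        // ... regular registration work continues here ...
    }

}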
23 changes: 12 additions & 11 deletions dbms/src/main/java/org/polypheny/db/PolyphenyDb.java
@@ -42,10 +42,12 @@
import org.polypheny.db.adapter.index.IndexManager;
import org.polypheny.db.adapter.java.AdapterTemplate;
import org.polypheny.db.catalog.Catalog;
+ import org.polypheny.db.catalog.Catalog.PolyphenyMode;
import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.catalog.impl.PolyCatalog;
import org.polypheny.db.catalog.logistic.NamespaceType;
+ import org.polypheny.db.cli.PolyphenyModesConverter;
import org.polypheny.db.config.ConfigManager;
import org.polypheny.db.config.RuntimeConfig;
import org.polypheny.db.ddl.DdlManager;
@@ -111,8 +113,8 @@ public class PolyphenyDb {
@Option(name = { "-memoryCatalog" }, description = "Store catalog only in-memory")
public boolean memoryCatalog = false;

- @Option(name = { "-testMode" }, description = "Special catalog configuration for running tests")
- public boolean testMode = false;
+ @Option(name = { "-mode" }, description = "Special system configuration for running tests", typeConverterProvider = PolyphenyModesConverter.class)
+ public PolyphenyMode mode = PolyphenyMode.DEFAULT;

@Option(name = { "-gui" }, description = "Show splash screen on startup and add taskbar gui")
public boolean desktopMode = false;
@@ -181,7 +183,7 @@ public void runPolyphenyDb() {
}

// Configuration shall not be persisted
- ConfigManager.memoryMode = (testMode || memoryCatalog);
+ ConfigManager.memoryMode = (mode == PolyphenyMode.TEST || memoryCatalog);
ConfigManager.resetCatalogOnStartup = resetCatalog;

// Select behavior depending on arguments
@@ -259,7 +261,7 @@ public void runPolyphenyDb() {
}

// Backup content of Polypheny folder
- if ( testMode || memoryCatalog ) {
+ if ( mode == PolyphenyMode.TEST || memoryCatalog ) {
if ( phdm.checkIfExists( "_test_backup" ) ) {
throw new GenericRuntimeException( "Unable to backup the Polypheny folder since there is already a backup folder." );
}
@@ -305,7 +307,7 @@ public void runPolyphenyDb() {
}
}

- if ( testMode ) {
+ if ( mode == PolyphenyMode.TEST ) {
uuid = "polypheny-test";
}

@@ -372,7 +374,6 @@ public void join( final long millis ) throws InterruptedException {
new ConfigService( server.getServer() );
new InformationService( server.getServer() );


try {
new JavaInformation();
} catch ( Exception e ) {
@@ -385,12 +386,12 @@
}

if ( AutoDocker.getInstance().isAvailable() ) {
- if ( testMode ) {
+ if ( mode == PolyphenyMode.TEST ) {
resetDocker = true;
Catalog.resetDocker = true;
}
boolean success = AutoDocker.getInstance().doAutoConnect();
- if ( testMode && !success ) {
+ if ( mode == PolyphenyMode.TEST && !success ) {
// AutoDocker does not work in Windows containers
if ( !System.getenv( "RUNNER_OS" ).equals( "Windows" ) ) {
log.error( "Failed to connect to docker instance" );
@@ -443,7 +444,7 @@ public void join( final long millis ) throws InterruptedException {
DdlManager.setAndGetInstance( new DdlManagerImpl( catalog ) );

// Add config and monitoring test page for UI testing
- if ( testMode ) {
+ if ( mode == PolyphenyMode.TEST ) {
new UiTestingConfigPage();
new UiTestingMonitoringPage();
}
@@ -521,7 +522,7 @@ private HttpServer startHttpServer( Authenticator authenticator, TransactionMan
private Catalog startCatalog() {
Catalog.resetCatalog = resetCatalog;
Catalog.memoryCatalog = memoryCatalog;
- Catalog.testMode = testMode;
+ Catalog.mode = mode;
Catalog.resetDocker = resetDocker;
Catalog catalog = Catalog.setAndGetInstance( new PolyCatalog() );
if ( catalog == null ) {
@@ -535,7 +536,7 @@ private Catalog startCatalog() {
private void restore( Authenticator authenticator, Catalog catalog ) {
PolyPluginManager.startUp( transactionManager, authenticator );

- if ( !resetCatalog && !testMode ) {
+ if ( !resetCatalog && mode != PolyphenyMode.TEST ) {
Catalog.getInstance().restore();
}
Catalog.getInstance().updateSnapshot();
@@ -0,0 +1,47 @@
/*
* Copyright 2019-2023 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.cli;

import com.github.rvesse.airline.types.DefaultTypeConverter;
import java.util.Arrays;
import lombok.extern.slf4j.Slf4j;
import org.polypheny.db.catalog.Catalog.PolyphenyMode;

@Slf4j
public class PolyphenyModesConverter extends DefaultTypeConverter {

@Override
public Object convert( String name, Class<?> type, String value ) {
String adjustedName = name.toUpperCase();

if ( Arrays.stream( PolyphenyMode.values() ).anyMatch( v -> v.name().equals( adjustedName ) ) ) {
return PolyphenyMode.valueOf( adjustedName );
}

switch ( adjustedName ) {
case "T":
return PolyphenyMode.TEST;
case "B":
return PolyphenyMode.BENCHMARK;
case "D":
return PolyphenyMode.DEFAULT;
}
log.warn( "Could not find the mode: " + adjustedName );
return PolyphenyMode.DEFAULT;
}

}
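The new PolyphenyModesConverter is intended to let the -mode option accept either a full PolyphenyMode name or a single-letter shorthand, falling back to DEFAULT for anything unrecognized. A standalone sketch of the same string-to-mode mapping, without the airline dependency (the parseMode helper is made up for illustration):

import java.util.Arrays;

public class ModeParsing {

    enum PolyphenyMode { DEFAULT, TEST, BENCHMARK }

    // Accepts "default"/"test"/"benchmark" in any case, or the shorthands "d"/"t"/"b".
    static PolyphenyMode parseMode( String value ) {
        String adjusted = value.toUpperCase();
        if ( Arrays.stream( PolyphenyMode.values() ).anyMatch( v -> v.name().equals( adjusted ) ) ) {
            return PolyphenyMode.valueOf( adjusted );
        }
        switch ( adjusted ) {
            case "T":
                return PolyphenyMode.TEST;
            case "B":
                return PolyphenyMode.BENCHMARK;
            case "D":
                return PolyphenyMode.DEFAULT;
        }
        return PolyphenyMode.DEFAULT;
    }

    public static void main( String[] args ) {
        System.out.println( parseMode( "test" ) );     // TEST
        System.out.println( parseMode( "b" ) );        // BENCHMARK
        System.out.println( parseMode( "unknown" ) );  // DEFAULT
    }

}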
24 changes: 10 additions & 14 deletions dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -710,7 +710,7 @@ public void createPolyphenyIndex( LogicalTable table, String indexMethodName, Li


@Override
- public void addDataPlacement( LogicalTable table, List<LogicalColumn> newColumns, List<Integer> partitionGroupIds, List<String> partitionGroupNames, DataStore<?> dataStore, Statement statement ) {
+ public void createAllocationPlacement( LogicalTable table, List<LogicalColumn> newColumns, List<Integer> partitionGroupIds, List<String> partitionGroupNames, DataStore<?> dataStore, Statement statement ) {

// check if allocation already exists
if ( catalog.getSnapshot().alloc().getPlacement( dataStore.getAdapterId(), table.id ).isPresent() ) {
@@ -732,7 +732,7 @@ public void addDataPlacement( LogicalTable table, List<LogicalColumn> newColumns
AllocationPlacement placement = catalog.getAllocRel( table.namespaceId ).addPlacement( table.id, table.namespaceId, dataStore.adapterId );
PartitionProperty property = catalog.getSnapshot().alloc().getPartitionProperty( table.id ).orElseThrow();

- addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, property.partitionIds, primaryKey.columnIds, dataStore );
+ addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, primaryKey.columnIds, property.partitionIds, dataStore );

Catalog.getInstance().updateSnapshot();

@@ -1817,7 +1817,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
for ( DataStore<?> store : stores ) {
AllocationPlacement placement = catalog.getAllocRel( namespaceId ).addPlacement( view.id, namespaceId, store.adapterId );

- addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of( partition.id ), List.of(), store );
+ addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of(), List.of( partition.id ), store );

}
addBlankPartition( namespaceId, view.id, List.of( group.id ), List.of( partition.id ) );
@@ -2133,8 +2133,9 @@ private Pair<AllocationPartition, PartitionProperty> createSinglePartition( long

private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Statement statement, LogicalTable logical, long placementId, List<LogicalColumn> lColumns, List<Long> pkIds, List<Long> partitionIds, Adapter<?> adapter ) {
List<AllocationColumn> columns = new ArrayList<>();
- for ( LogicalColumn column : lColumns ) {
- columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, column.position ) );
+ int i = 0;
+ for ( LogicalColumn column : sortByPosition( lColumns ) ) {
+ columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) );
}

buildNamespace( namespaceId, logical, adapter );
@@ -2146,15 +2147,15 @@ private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Stat
}


- private PartitionProperty addBlankPartition( long namespaceId, long logicalEntityId, List<Long> groupIds, List<Long> allocIds ) {
+ private PartitionProperty addBlankPartition( long namespaceId, long logicalEntityId, List<Long> groupIds, List<Long> partitionIds ) {
//LogicalPartitionGroup defaultUnpartitionedGroup = catalog.getAllocRel( namespaceId ).addPartitionGroup( logicalEntityId, "full", namespaceId, PartitionType.NONE, 1, List.of(), true );

PartitionProperty partitionProperty = PartitionProperty.builder()
.entityId( logicalEntityId )
.partitionType( PartitionType.NONE )
.isPartitioned( false )
.partitionGroupIds( ImmutableList.copyOf( groupIds ) )
- .partitionIds( ImmutableList.copyOf( allocIds ) )
+ .partitionIds( ImmutableList.copyOf( partitionIds ) )
.reliesOnPeriodicChecks( false )
.build();

@@ -2166,7 +2167,7 @@ private PartitionProperty addBlankPartition( long namespaceId, long logicalEntit
private AllocationTable addAllocationTable( long namespaceId, Statement statement, LogicalTable logical, List<LogicalColumn> lColumns, List<Long> pkIds, long placementId, long partitionId, List<AllocationColumn> aColumns, Adapter<?> adapter ) {
AllocationTable alloc = catalog.getAllocRel( namespaceId ).addAllocation( adapter.adapterId, placementId, partitionId, logical.id );

- adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ), pkIds ), AllocationTableWrapper.of( alloc, sortByPositionAlloc( aColumns ) ) );
+ adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ), pkIds ), AllocationTableWrapper.of( alloc, aColumns ) );
return alloc;
}

@@ -2177,12 +2178,6 @@ private static List<LogicalColumn> sortByPosition( List<LogicalColumn> columns )
}


- @NotNull
- private static List<AllocationColumn> sortByPositionAlloc( List<AllocationColumn> columns ) {
- return columns.stream().sorted( Comparator.comparingInt( a -> a.position ) ).collect( Collectors.toList() );
- }


private void buildNamespace( long namespaceId, LogicalTable logical, Adapter<?> store ) {
store.updateNamespace( logical.getNamespaceName(), namespaceId );
}
@@ -2673,6 +2668,7 @@ private Pair<List<AllocationPartition>, PartitionProperty> addGroupsAndPartition
return Pair.of( partitions, partitionProperty );
}


@Override
public void dropTablePartition( LogicalTable table, Statement statement ) throws TransactionException {
long tableId = table.id;
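addAllocationsForPlacement now sorts the logical columns by their position and hands out consecutive indices, so the allocation columns of a placement always get dense, gap-free positions. A small sketch of that sort-then-reindex step (LogicalCol is a stand-in for Polypheny's LogicalColumn):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class DensePositions {

    record LogicalCol( String name, int position ) {}

    // Sort by logical position, then assign consecutive placement positions starting at 0.
    static List<String> assignDensePositions( List<LogicalCol> columns ) {
        List<LogicalCol> sorted = columns.stream()
                .sorted( Comparator.comparingInt( LogicalCol::position ) )
                .collect( Collectors.toList() );
        List<String> result = new ArrayList<>();
        int i = 0;
        for ( LogicalCol column : sorted ) {
            result.add( i++ + ":" + column.name() );
        }
        return result;
    }

    public static void main( String[] args ) {
        // Logical positions 2, 5, 9 (gaps left by earlier schema changes) become 0, 1, 2.
        List<LogicalCol> columns = List.of( new LogicalCol( "age", 9 ), new LogicalCol( "id", 2 ), new LogicalCol( "name", 5 ) );
        System.out.println( assignDensePositions( columns ) ); // [0:id, 1:name, 2:age]
    }

}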
