Skip to content

Commit

Permalink
Merge branch 'refactor' of https://github.com/polypheny/Polypheny-DB
Browse files Browse the repository at this point in the history
…into backup-development
  • Loading branch information
flurfis committed Nov 1, 2023
2 parents 7a8fc30 + 121e262 commit 57fb8e1
Show file tree
Hide file tree
Showing 26 changed files with 116 additions and 153 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ default void resetParameterValues() {
throw new UnsupportedOperationException();
}

default Object getParameterValue( long index ) {
default PolyValue getParameterValue( long index ) {
if ( getParameterValues().size() != 1 ) {
throw new GenericRuntimeException( "Illegal number of parameter sets" );
}
Expand Down
4 changes: 2 additions & 2 deletions core/src/main/java/org/polypheny/db/adapter/Scannable.java
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,8 @@ static PhysicalTable createSubstitutionTable( Scannable scannable, Context conte
AllocationColumn alloc = new AllocationColumn( logical.namespaceId, allocSubTable.placementId, allocSubTable.logicalId, column.id, PlacementType.AUTOMATIC, i++, allocation.adapterId );
allocColumns.add( alloc );
}

scannable.createTable( context, LogicalTableWrapper.of( table, columns ), AllocationTableWrapper.of( allocSubTable, allocColumns ) );
// We use the first column as the primary key
scannable.createTable( context, LogicalTableWrapper.of( table, columns, List.of( columns.get( 0 ).id ) ), AllocationTableWrapper.of( allocSubTable, allocColumns ) );
return scannable.getCatalog().getPhysicalsFromAllocs( allocSubTable.id ).get( 0 ).unwrap( PhysicalTable.class );
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -193,8 +193,9 @@ public interface LogicalRelationalCatalog extends LogicalCatalog {
*
* @param tableId The id of the table
* @param columnIds The ids of the columns which will be part of the primary key
* @return The updated logical table with the new primary key
*/
void addPrimaryKey( long tableId, List<Long> columnIds );
LogicalTable addPrimaryKey( long tableId, List<Long> columnIds );


/**
Expand All @@ -216,8 +217,9 @@ public interface LogicalRelationalCatalog extends LogicalCatalog {
* @param tableId The id of the table
* @param constraintName The name of the constraint
* @param columnIds A list of column ids
* @return The updated logical table with the new unique constraint
*/
void addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds );
LogicalTable addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds );

/**
* Deletes the specified primary key (including the entry in the key table). If there is an index on this key, make sure to delete it first.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,6 @@ public class LogicalTableWrapper {

public List<LogicalColumn> columns;

public List<Long> pkIds;

}
Original file line number Diff line number Diff line change
Expand Up @@ -366,7 +366,7 @@ public void deleteDefaultValue( long columnId ) {


@Override
public void addPrimaryKey( long tableId, List<Long> columnIds ) {
public LogicalTable addPrimaryKey( long tableId, List<Long> columnIds ) {
if ( columnIds.stream().anyMatch( id -> columns.get( id ).nullable ) ) {
throw new GenericRuntimeException( "Primary key is not allowed to use nullable columns." );
}
Expand All @@ -390,6 +390,7 @@ public void addPrimaryKey( long tableId, List<Long> columnIds ) {
long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY );
setPrimaryKey( tableId, keyId );
change();
return tables.get( tableId );
}


Expand Down Expand Up @@ -500,7 +501,7 @@ public void addForeignKey( long tableId, List<Long> columnIds, long referencesTa


@Override
public void addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds ) {
public LogicalTable addUniqueConstraint( long tableId, String constraintName, List<Long> columnIds ) {
long keyId = getOrAddKey( tableId, columnIds, EnforcementTime.ON_QUERY );
// Check if there is already a unique constraint
List<LogicalConstraint> logicalConstraints = constraints.values().stream()
Expand All @@ -514,6 +515,7 @@ public void addUniqueConstraint( long tableId, String constraintName, List<Long>
constraints.put( id, new LogicalConstraint( id, keyId, ConstraintType.UNIQUE, constraintName, Objects.requireNonNull( keys.get( keyId ) ) ) );
change();
}
return tables.get( tableId );
}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1261,35 +1261,6 @@ public void addRuleDuringRuntime( AlgOptRule rule ) {
}

}

/*AlgVisitor visitor = new AlgVisitor() {
final Set<AlgSubset> visitedSubsets = new HashSet<>();
@Override
public void visit( AlgNode node, int ordinal, AlgNode parent ) {
if ( node instanceof AlgSubset ) {
AlgSubset subset = (AlgSubset) node;
if ( visitedSubsets.contains( subset ) ) {
return;
}
visitedSubsets.add( subset );
int i = 0;
for ( AlgNode alg : subset.set.algs ) {
visit( alg, i++, subset );
}
} else {
if ( operand.matches( node ) ) {
matches.add( Pair.of( node, (AlgSubset) parent ) );
}
super.visit( node, ordinal, parent );
}
}
};
visitor.go( root );*/
for ( Pair<AlgNode, AlgSet> pair : matches ) {
fireRules( pair.left, true );
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,11 @@
import lombok.Value;
import org.apache.calcite.avatica.util.ByteString;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.jetbrains.annotations.NotNull;
import org.polypheny.db.type.PolySerializable;
import org.polypheny.db.type.PolyType;
import org.polypheny.db.util.BitString;
import org.polypheny.db.util.ConversionUtil;

@EqualsAndHashCode(callSuper = true)
Expand Down Expand Up @@ -88,7 +90,7 @@ public String toHexString() {

@Override
public Expression asExpression() {
return null;
return Expressions.call( PolyBinary.class, "of", Expressions.constant( value.getBytes() ) );
}


Expand All @@ -100,12 +102,12 @@ public PolySerializable copy() {

@Override
public String toString() {
return value.toBase64String();
return value.toString();
}


public int getBitCount() {
return value.getBytes().length;
return BitString.createFromBytes( value.getBytes() ).getBitCount();
}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ public static Function1<PolyValue, Object> getPolyToJava( AlgDataType type, bool
case VIDEO:
return o -> o.asBlob().asByteArray();
default:
throw new org.apache.commons.lang3.NotImplementedException( "meta" );
throw new NotImplementedException( "meta" );
}
}

Expand Down
9 changes: 3 additions & 6 deletions core/src/main/java/org/polypheny/db/util/BitString.java
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@

import java.math.BigInteger;
import java.util.List;
import lombok.Getter;


/**
Expand All @@ -51,6 +52,7 @@
public class BitString {

private final String bits;
@Getter
private final int bitCount;


Expand Down Expand Up @@ -95,11 +97,6 @@ public String toString() {
}


public int getBitCount() {
return bitCount;
}


public byte[] getAsByteArray() {
return toByteArrayFromBitString( bits, bitCount );
}
Expand Down Expand Up @@ -148,7 +145,7 @@ public static byte[] toByteArrayFromBitString( String bits, int bitCount ) {
}
int byteCount = (bitCount + 7) / 8;
byte[] srcBytes;
if ( bits.length() > 0 ) {
if ( !bits.isEmpty() ) {
BigInteger bigInt = new BigInteger( bits, 2 );
srcBytes = bigInt.toByteArray();
} else {
Expand Down
31 changes: 19 additions & 12 deletions dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ private void handleSource( DataSource<?> adapter ) {
}

buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
adapter.createTable( null, LogicalTableWrapper.of( logical, columns ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ), aColumns ) );
adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ), aColumns ) );
catalog.updateSnapshot();

}
Expand Down Expand Up @@ -732,7 +732,7 @@ public void addDataPlacement( LogicalTable table, List<LogicalColumn> newColumns
AllocationPlacement placement = catalog.getAllocRel( table.namespaceId ).addPlacement( table.id, table.namespaceId, dataStore.adapterId );
PartitionProperty property = catalog.getSnapshot().alloc().getPartitionProperty( table.id ).orElseThrow();

addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, property.partitionIds, dataStore );
addAllocationsForPlacement( table.namespaceId, statement, table, placement.id, adjustedColumns, property.partitionIds, primaryKey.columnIds, dataStore );

Catalog.getInstance().updateSnapshot();

Expand Down Expand Up @@ -1488,7 +1488,7 @@ public void modifyPartitionPlacement( LogicalTable table, List<Long> partitionId
table.id,
PlacementType.AUTOMATIC,
DataPlacementRole.UP_TO_DATE );*/
addAllocationTable( table.namespaceId, statement, table, columns, placement.id, partitionId, allocationColumns, store );
addAllocationTable( table.namespaceId, statement, table, columns, List.of(), placement.id, partitionId, allocationColumns, store );
}

//storeId.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( table, null ), (AllocationTableWrapper) null );
Expand Down Expand Up @@ -1817,7 +1817,7 @@ public void createMaterializedView( String viewName, long namespaceId, AlgRoot a
for ( DataStore<?> store : stores ) {
AllocationPlacement placement = catalog.getAllocRel( namespaceId ).addPlacement( view.id, namespaceId, store.adapterId );

addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of( partition.id ), store );
addAllocationsForPlacement( namespaceId, statement, view, placement.id, List.copyOf( ids.values() ), List.of( partition.id ), List.of(), store );

}
addBlankPartition( namespaceId, view.id, List.of( group.id ), List.of( partition.id ) );
Expand Down Expand Up @@ -2096,8 +2096,15 @@ public void createTable( long namespaceId, String name, List<FieldInformation> f
ids.put( information.name, addColumn( namespaceId, information.name, information.typeInformation, information.collation, information.defaultValue, logical.id, information.position, stores, placementType ) );
}

List<Long> pkIds = new ArrayList<>();

for ( ConstraintInformation constraint : constraints ) {
createConstraint( namespaceId, constraint.name, constraint.type, constraint.columnNames.stream().map( key -> ids.get( key ).id ).collect( Collectors.toList() ), logical.id );
List<Long> columnIds = constraint.columnNames.stream().map( key -> ids.get( key ).id ).collect( Collectors.toList() );
createConstraint( namespaceId, constraint.name, constraint.type, columnIds, logical.id );

if ( constraint.type == ConstraintType.PRIMARY ) {
pkIds = columnIds;
}
}

// addATable
Expand All @@ -2108,7 +2115,7 @@ public void createTable( long namespaceId, String name, List<FieldInformation> f
for ( DataStore<?> store : stores ) {
AllocationPlacement placement = catalog.getAllocRel( namespaceId ).addPlacement( logical.id, namespaceId, store.adapterId );

addAllocationsForPlacement( namespaceId, statement, logical, placement.id, columns, List.of( partition.id ), store );
addAllocationsForPlacement( namespaceId, statement, logical, placement.id, columns, pkIds, List.of( partition.id ), store );
}

catalog.updateSnapshot();
Expand All @@ -2124,7 +2131,7 @@ private Pair<AllocationPartition, PartitionProperty> createSinglePartition( long
}


private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Statement statement, LogicalTable logical, long placementId, List<LogicalColumn> lColumns, List<Long> partitionIds, Adapter<?> adapter ) {
private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Statement statement, LogicalTable logical, long placementId, List<LogicalColumn> lColumns, List<Long> pkIds, List<Long> partitionIds, Adapter<?> adapter ) {
List<AllocationColumn> columns = new ArrayList<>();
for ( LogicalColumn column : lColumns ) {
columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, column.position ) );
Expand All @@ -2133,7 +2140,7 @@ private List<AllocationTable> addAllocationsForPlacement( long namespaceId, Stat
buildNamespace( namespaceId, logical, adapter );
List<AllocationTable> tables = new ArrayList<>();
for ( Long partitionId : partitionIds ) {
tables.add( addAllocationTable( namespaceId, statement, logical, lColumns, placementId, partitionId, columns, adapter ) );
tables.add( addAllocationTable( namespaceId, statement, logical, lColumns, pkIds, placementId, partitionId, columns, adapter ) );
}
return tables;
}
Expand All @@ -2156,10 +2163,10 @@ private PartitionProperty addBlankPartition( long namespaceId, long logicalEntit
}


private AllocationTable addAllocationTable( long namespaceId, Statement statement, LogicalTable logical, List<LogicalColumn> lColumns, long placementId, long partitionId, List<AllocationColumn> aColumns, Adapter<?> adapter ) {
private AllocationTable addAllocationTable( long namespaceId, Statement statement, LogicalTable logical, List<LogicalColumn> lColumns, List<Long> pkIds, long placementId, long partitionId, List<AllocationColumn> aColumns, Adapter<?> adapter ) {
AllocationTable alloc = catalog.getAllocRel( namespaceId ).addAllocation( adapter.adapterId, placementId, partitionId, logical.id );

adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ) ), AllocationTableWrapper.of( alloc, sortByPositionAlloc( aColumns ) ) );
adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( logical, sortByPosition( lColumns ), pkIds ), AllocationTableWrapper.of( alloc, sortByPositionAlloc( aColumns ) ) );
return alloc;
}

Expand Down Expand Up @@ -2405,7 +2412,7 @@ public void createTablePartition( PartitionInformation partitionInfo, List<DataS
PlacementType.AUTOMATIC,
DataPlacementRole.UP_TO_DATE );*/

partitionAllocations.add( addAllocationTable( partitionInfo.table.namespaceId, statement, unPartitionedTable, logicalColumns, placement.id, partition.id, columns, store ) );
partitionAllocations.add( addAllocationTable( partitionInfo.table.namespaceId, statement, unPartitionedTable, logicalColumns, pkColumnIds, placement.id, partition.id, columns, store ) );
}
newAllocations.put( placement, partitionAllocations );

Expand Down Expand Up @@ -2715,7 +2722,7 @@ public void dropTablePartition( LogicalTable table, Statement statement ) throws
List<AllocationColumn> columns = snapshot.alloc().getColumns( placement.id );

// First create new tables
AllocationTable targetTable = addAllocationTable( table.namespaceId, statement, table, logicalColumns, placement.id, partitionProperty.left.id, columns, store );
AllocationTable targetTable = addAllocationTable( table.namespaceId, statement, table, logicalColumns, pkColumnIds, placement.id, partitionProperty.left.id, columns, store );

catalog.updateSnapshot();
dataMigrator.copyAllocationData(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -346,7 +346,7 @@ private void createHotTables( LogicalTable table, List<Long> partitionsFromColdT
DataPlacementRole.UP_TO_DATE );
}

store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null ), AllocationTableWrapper.of( null, null ) );
store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null, null ), AllocationTableWrapper.of( null, null ) );

List<LogicalColumn> logicalColumns = new ArrayList<>();
catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerTable( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ).orElseThrow() ) );
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1145,7 +1145,7 @@ private AlgNode optimize( AlgRoot logicalRoot, Convention resultConvention ) {
}


private <T> PreparedResult<PolyValue> implement( AlgRoot root, AlgDataType parameterRowType ) {
private PreparedResult<PolyValue> implement( AlgRoot root, AlgDataType parameterRowType ) {
if ( log.isTraceEnabled() ) {
log.trace( "Physical query plan: [{}]", AlgOptUtil.dumpPlan( "-- Physical Plan", root.alg, ExplainFormat.TEXT, ExplainLevel.DIGEST_ATTRIBUTES ) );
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,7 @@
import org.polypheny.db.transaction.TransactionManager;
import org.polypheny.db.type.PolyType;
import org.polypheny.db.type.entity.PolyBigDecimal;
import org.polypheny.db.type.entity.PolyBinary;
import org.polypheny.db.type.entity.PolyBoolean;
import org.polypheny.db.type.entity.PolyDate;
import org.polypheny.db.type.entity.PolyDouble;
Expand Down Expand Up @@ -1292,6 +1293,8 @@ private PolyType toPolyType( Rep type ) {
return PolyType.FLOAT;
case STRING:
return PolyType.VARCHAR;
case BYTE_STRING:
return PolyType.BINARY;
case OBJECT:
return PolyType.OTHER;
}
Expand Down Expand Up @@ -1342,6 +1345,8 @@ private PolyValue toPolyValue( TypedValue value ) {
return PolyInteger.of( (Short) jdbc );
case BYTE:
return PolyInteger.of( (byte) jdbc );
case BYTE_STRING:
return PolyBinary.of( (byte[]) jdbc );
}
throw new NotImplementedException( "dbms to poly " + value.type );
}
Expand Down
Loading

0 comments on commit 57fb8e1

Please sign in to comment.