Add alg nodes for identifier collection
Tobias Hafner committed Dec 26, 2024
1 parent 61d75b0 commit 6f3c1c9
Showing 8 changed files with 228 additions and 55 deletions.
@@ -0,0 +1,42 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.algebra.core.common;

import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.SingleAlg;
import org.polypheny.db.plan.AlgCluster;
import org.polypheny.db.plan.AlgTraitSet;
import org.polypheny.db.transaction.Transaction;


public class IdentifierCollector extends SingleAlg {

    protected final Transaction transaction;


    protected IdentifierCollector( AlgCluster cluster, AlgTraitSet traits, Transaction transaction, AlgNode input ) {
        super( cluster, traits, input );
        this.transaction = transaction;
    }


    @Override
    public String algCompareString() {
        return this.getClass().getSimpleName() + "$" +
                input.algCompareString() + "$" +
                transaction.getId() + "&";
    }
}
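
For orientation, a minimal sketch of how such a collector could be attached to an existing plan root. The helper class below is hypothetical and not part of this commit; only LogicalRelIdCollector.create(...) (added further down in this change set) and the Transaction type are taken from the source.

import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.logical.relational.LogicalRelIdCollector;
import org.polypheny.db.transaction.Transaction;

// Hypothetical helper, not part of this commit: wrap a relational plan root so
// that the identifiers of the tuples flowing out of it are collected for the
// given transaction. create() reuses the cluster and trait set of its input,
// so the wrapped plan keeps its planning traits.
final class IdCollectorExample {

    static AlgNode wrapRelRoot( AlgNode root, Transaction transaction ) {
        return LogicalRelIdCollector.create( root, transaction );
    }

}

Because algCompareString() includes transaction.getId(), two otherwise identical wrapped plans created in different transactions never compare as equal, which presumably keeps per-transaction collectors from being shared.
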
@@ -52,8 +52,8 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) {
}

@Override
-public AlgNode copy(AlgTraitSet traitSete, List<AlgNode> inputs) {
-return new LogicalDocIdentifier(entity, getCluster(), traitSete, sole(inputs) );
+public AlgNode copy(AlgTraitSet traitSet, List<AlgNode> inputs) {
+return new LogicalDocIdentifier(entity, getCluster(), traitSet, sole(inputs) );
}


@@ -0,0 +1,63 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.algebra.logical.document;

import java.util.List;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.core.common.IdentifierCollector;
import org.polypheny.db.algebra.core.document.DocumentAlg;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
import org.polypheny.db.plan.AlgCluster;
import org.polypheny.db.plan.AlgOptCost;
import org.polypheny.db.plan.AlgPlanner;
import org.polypheny.db.plan.AlgTraitSet;
import org.polypheny.db.transaction.Transaction;

public class LogicalDocumentIdCollector extends IdentifierCollector implements DocumentAlg {

    protected LogicalDocumentIdCollector( AlgCluster cluster, AlgTraitSet traits, Transaction transaction, AlgNode input ) {
        super( cluster, traits, transaction, input );
    }


    public static LogicalDocumentIdCollector create( Transaction transaction, final AlgNode input ) {
        final AlgCluster cluster = input.getCluster();
        final AlgTraitSet traits = input.getTraitSet();
        return new LogicalDocumentIdCollector( cluster, traits, transaction, input );
    }


    @Override
    public DocType getDocType() {
        // ToDo TH: is this correct?
        return DocType.VALUES;
    }


    @Override
    public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) {
        double dRows = mq.getTupleCount( getInput() );
        return planner.getCostFactory().makeCost( dRows, 0, 0 );
    }


    @Override
    public AlgNode copy( AlgTraitSet traitSet, List<AlgNode> inputs ) {
        return new LogicalDocumentIdCollector( getCluster(), traitSet, transaction, sole( inputs ) );
    }

}
@@ -0,0 +1,63 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.algebra.logical.lpg;

import java.util.List;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.core.common.IdentifierCollector;
import org.polypheny.db.algebra.core.lpg.LpgAlg;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
import org.polypheny.db.plan.AlgCluster;
import org.polypheny.db.plan.AlgOptCost;
import org.polypheny.db.plan.AlgPlanner;
import org.polypheny.db.plan.AlgTraitSet;
import org.polypheny.db.transaction.Transaction;

public class LogicalLpgIdCollector extends IdentifierCollector implements LpgAlg {

    protected LogicalLpgIdCollector( AlgCluster cluster, AlgTraitSet traits, Transaction transaction, AlgNode input ) {
        super( cluster, traits, transaction, input );
    }


    public static LogicalLpgIdCollector create( Transaction transaction, final AlgNode input ) {
        final AlgCluster cluster = input.getCluster();
        final AlgTraitSet traits = input.getTraitSet();
        return new LogicalLpgIdCollector( cluster, traits, transaction, input );
    }


    @Override
    public NodeType getNodeType() {
        // ToDo: Is this the proper type here?
        return NodeType.VALUES;
    }


    @Override
    public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) {
        double dRows = mq.getTupleCount( getInput() );
        return planner.getCostFactory().makeCost( dRows, 0, 0 );
    }


    @Override
    public AlgNode copy( AlgTraitSet traitSet, List<AlgNode> inputs ) {
        return new LogicalLpgIdCollector( getCluster(), traitSet, transaction, sole( inputs ) );
    }

}
@@ -53,8 +53,8 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) {
}

@Override
-public AlgNode copy(AlgTraitSet traitSete, List<AlgNode> inputs) {
-return new LogicalLpgIdentifier(entity, getCluster(), traitSete, sole(inputs) );
+public AlgNode copy(AlgTraitSet traitSet, List<AlgNode> inputs) {
+return new LogicalLpgIdentifier(entity, getCluster(), traitSet, sole(inputs) );
}

}
@@ -0,0 +1,55 @@
/*
* Copyright 2019-2024 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.polypheny.db.algebra.logical.relational;

import java.util.List;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.core.common.IdentifierCollector;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
import org.polypheny.db.plan.AlgCluster;
import org.polypheny.db.plan.AlgOptCost;
import org.polypheny.db.plan.AlgPlanner;
import org.polypheny.db.plan.AlgTraitSet;
import org.polypheny.db.transaction.Transaction;

public class LogicalRelIdCollector extends IdentifierCollector {

    protected LogicalRelIdCollector( AlgCluster cluster, AlgTraitSet traits, Transaction transaction, AlgNode input ) {
        super( cluster, traits, transaction, input );
    }


    public static LogicalRelIdCollector create( AlgNode input, Transaction transaction ) {
        final AlgCluster cluster = input.getCluster();
        final AlgTraitSet traits = input.getTraitSet();
        return new LogicalRelIdCollector( cluster, traits, transaction, input );
    }


    @Override
    public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) {
        double dRows = mq.getTupleCount( getInput() );
        return planner.getCostFactory().makeCost( dRows, 0, 0 );
    }


    @Override
    public AlgNode copy( AlgTraitSet traitSet, List<AlgNode> inputs ) {
        return new LogicalRelIdCollector( getCluster(), traitSet, transaction, sole( inputs ) );
    }

}
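
Taken together, the three logical collectors added in this commit cover Polypheny's relational, document and graph (LPG) data models. Below is a hedged sketch of how a caller might pick the matching collector for a plan root; the DataModel enum (assumed to live in org.polypheny.db.catalog.logistic) and the dispatch helper are assumptions and not part of this commit, while the three create() factories, including their differing parameter order, are taken verbatim from the classes above.

import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.logical.document.LogicalDocumentIdCollector;
import org.polypheny.db.algebra.logical.lpg.LogicalLpgIdCollector;
import org.polypheny.db.algebra.logical.relational.LogicalRelIdCollector;
import org.polypheny.db.catalog.logistic.DataModel;
import org.polypheny.db.transaction.Transaction;

final class IdCollectorDispatch {

    // Hypothetical dispatch: choose the collector that matches the data model of
    // the plan root. Note that LogicalRelIdCollector.create() takes (input, transaction)
    // while the document and LPG variants take (transaction, input).
    static AlgNode wrap( AlgNode root, Transaction tx, DataModel model ) {
        return switch ( model ) {
            case RELATIONAL -> LogicalRelIdCollector.create( root, tx );
            case DOCUMENT -> LogicalDocumentIdCollector.create( tx, root );
            case GRAPH -> LogicalLpgIdCollector.create( tx, root );
            default -> throw new IllegalArgumentException( "Unexpected data model: " + model );
        };
    }

}
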
@@ -39,6 +39,7 @@
import org.polypheny.db.processing.QueryContext.ParsedQueryContext;
import org.polypheny.db.transaction.Statement;
import org.polypheny.db.transaction.Transaction;
import org.polypheny.db.transaction.TransactionManager;
import org.polypheny.db.transaction.locking.AlgTreeRewriter;
import org.polypheny.db.util.DeadlockException;
import org.polypheny.db.util.Pair;
@@ -103,32 +103,6 @@ public AlgNode visit( LogicalRelTableFunctionScan scan ) {
@Override
public AlgNode visit( LogicalRelValues values ) {
return values;
-/*
-Inserts can be divided into two categories according to the nature of their input:
-1) Values as input
-2) Inputs that are not values.
-In case 1, we already add the identifiers here by modifying the values.
-Case 2 is handled using rules in the planner.
-ImmutableList<ImmutableList<RexLiteral>> newValues = values.tuples.stream()
-.map(row -> ImmutableList.<RexLiteral>builder()
-.add(IdentifierUtils.getIdentifierAsLiteral())
-.addAll(row)
-.build())
-.collect(ImmutableList.toImmutableList());
-List<AlgDataTypeField> newFields = new ArrayList<>();
-newFields.add(new AlgDataTypeFieldImpl(0L, "_eid", "_eid", 0, IdentifierUtils.IDENTIFIER_ALG_TYPE ));
-values.getRowType().getFields().stream()
-.map(f -> new AlgDataTypeFieldImpl(f.getId(), f.getName(), f.getPhysicalName(), f.getIndex() + 1, f.getType()))
-.forEach(newFields::add);
-AlgDataType newRowType = new AlgRecordType( StructKind.FULLY_QUALIFIED, newFields );
-return LogicalRelValues.create( values.getCluster(), newRowType, newValues );
-*/

}


@@ -141,19 +115,6 @@ public AlgNode visit( LogicalRelFilter filter ) {
@Override
public AlgNode visit( LogicalRelProject project ) {
return visitChildren( project );

-//AlgNode input = project.getInput();
-//if (! (input instanceof LogicalRelValues oldValues)) {
-// return project;
-//}
-/*
-When values are used for an insert, they are modified.
-The project has to be adjusted accordingly.
-*/
-//LogicalRelValues newValues = (LogicalRelValues) visit(oldValues);
-//List<RexNode> newProjects = createNewProjects(project, newValues);
-//return project.copy(project.getTraitSet(), newValues, newProjects, project.getRowType());

}


@@ -276,11 +237,6 @@ public AlgNode visit( LogicalLpgScan scan ) {

@Override
public AlgNode visit( LogicalLpgValues values ) {
-/*
-values.getNodes().forEach( n -> n.getProperties().put( IdentifierUtils.getIdentifierKeyAsPolyString(), IdentifierUtils.getIdentifierAsPolyLong() ) );
-values.getEdges().forEach( n -> n.getProperties().put( IdentifierUtils.getIdentifierKeyAsPolyString(), IdentifierUtils.getIdentifierAsPolyLong() ) );
-return values;
-*/
return values;
}

@@ -384,13 +340,6 @@ public AlgNode visit( LogicalDocumentTransformer transformer ) {

@Override
public AlgNode visit( LogicalDocumentValues values ) {
-/*
-IdentifierUtils.throwIfContainsIdentifierKey( values.getDocuments() );
-List<PolyDocument> newDocuments = values.getDocuments().stream()
-.peek( d -> d.put( IdentifierUtils.getIdentifierKeyAsPolyString(), IdentifierUtils.getIdentifierAsPolyLong() ) )
-.collect( Collectors.toCollection( LinkedList::new ) );
-return LogicalDocumentValues.create( values.getCluster(), newDocuments );
-*/
return values;
}

