Skip to content

Commit

Permalink
reformatting and mongo adjustment
Browse files Browse the repository at this point in the history
  • Loading branch information
datomo committed Nov 26, 2023
1 parent b57570b commit 0bb567e
Show file tree
Hide file tree
Showing 8 changed files with 33 additions and 96 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ jobs:
strategy:
fail-fast: false
matrix:
adapter: [ mongodb, hsqldb, monetdb, postgresql, file, cottontail, cassandra, neo4j ]
adapter: [ mongodb, hsqldb, monetdb, postgresql, file, cottontail, neo4j ]
name: Integration Tests (Java 11)
steps:
- name: Checkout
Expand Down
7 changes: 0 additions & 7 deletions core/src/main/java/org/polypheny/db/ResultIterator.java
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,6 @@ public List<List<PolyValue>> getNextBatch() {
res.add( Lists.newArrayList( iterator.next() ) );
}

//List<List<T>> res = MetaImpl.collect( cursorFactory, (Iterator<Object>) iterator., new ArrayList<>() ).stream().map( e -> (List<T>) e ).collect( Collectors.toList() );

if ( isTimed ) {
stopWatch.stop();
executionTimeMonitor.setExecutionTime( stopWatch.getNanoTime() );
Expand Down Expand Up @@ -116,11 +114,6 @@ public List<List<PolyValue>> getAllRowsAndClose() {
}


/**
 * Convenience accessor that fetches the next batch of rows without applying
 * any per-row transformation.
 *
 * @return the next batch of rows from the underlying iterator
 */
public List<PolyValue> getSingleRows() {
    final List<PolyValue> batch = this.getNextBatch( null );
    return batch;
}


@NotNull
private <D> List<D> getNextBatch( @Nullable Function<PolyValue[], D> transformer ) {
final Iterable<PolyValue[]> iterable = () -> iterator;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,11 @@
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.jetbrains.annotations.NotNull;
import org.polypheny.db.adapter.java.AdapterTemplate;
import org.polypheny.db.algebra.constant.FunctionCategory;
import org.polypheny.db.algebra.constant.Syntax;
import org.polypheny.db.algebra.operators.OperatorTable;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.LogicalAdapter;
import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
import org.polypheny.db.catalog.entity.LogicalQueryInterface;
Expand All @@ -48,10 +45,6 @@ public interface Snapshot extends OperatorTable {

long id();

/**
 * Builds a linq4j expression that, when evaluated at runtime, retrieves the
 * snapshot with the given id from the catalog.
 *
 * @param id the id of the snapshot to retrieve
 * @return an expression invoking {@code getSnapshot} on the catalog expression
 */
default Expression getSnapshotExpression( long id ) {
    final Expression snapshotId = Expressions.constant( id );
    return Expressions.call( Catalog.CATALOG_EXPRESSION, "getSnapshot", snapshotId );
}


/**
* Get all schemas which fit to the specified filter pattern.
Expand Down

This file was deleted.

6 changes: 1 addition & 5 deletions gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,6 @@ cottontaildb_driver_version = 0.13.0
cottontaildb_grpc_version = 1.36.0
elasticsearch_rest_client_version = 6.2.4
elasticsearch_version = 6.2.4
embedded_monetdb_version = 2.39
embedded_mysql_version = 4.6.1
embedded_postgres_version = 1.3.1
esri_geometry_api_version = 2.2.0
fmpp_plugin_version = 0.9.16
geode_core_version = 1.6.0
Expand Down Expand Up @@ -116,9 +113,8 @@ protobuf_plugin_version = 0.9.4
reflections_version = 0.10.2
shadow_plugin_version = 7.1.1
simplemagic_version = 1.16
slf4j_api_version = 2.0.3
slf4j_api_version = 2.0.9
transport_netty4_client_version = 6.2.4
typesafe_config_version = 1.2.1
unirest_version = 3.11.10
web3j_version = 5.0.0
weka_version = 3.8.0
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
Expand Down Expand Up @@ -80,12 +81,18 @@
import org.polypheny.db.algebra.core.common.Modify.Operation;
import org.polypheny.db.algebra.logical.document.LogicalDocumentModify;
import org.polypheny.db.algebra.logical.relational.LogicalRelModify;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.algebra.type.AlgDataTypeFactory;
import org.polypheny.db.algebra.type.AlgDataTypeImpl;
import org.polypheny.db.algebra.type.AlgProtoDataType;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.entity.LogicalEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalCollection;
import org.polypheny.db.catalog.entity.physical.PhysicalColumn;
import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
import org.polypheny.db.catalog.entity.physical.PhysicalField;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.catalog.logistic.NamespaceType;
import org.polypheny.db.catalog.snapshot.Snapshot;
import org.polypheny.db.plan.AlgOptCluster;
import org.polypheny.db.plan.AlgTraitSet;
Expand Down Expand Up @@ -138,6 +145,27 @@ public class MongoEntity extends PhysicalEntity implements TranslatableEntity, M
}


/**
 * Returns the row type of this entity. Relational namespaces derive the row
 * type from the physical columns via {@link #buildProto()}; every other
 * namespace type falls back to the parent implementation.
 */
@Override
public AlgDataType getRowType() {
    return namespaceType == NamespaceType.RELATIONAL
            ? buildProto().apply( AlgDataTypeFactory.DEFAULT )
            : super.getRowType();
}


/**
 * Builds a prototype row type from the physical columns of this entity.
 * Columns are added in ascending {@code position} order so the resulting
 * row type matches the physical column layout.
 *
 * @return a prototype of the row type assembled from the physical columns
 */
public AlgProtoDataType buildProto() {
    final AlgDataTypeFactory.Builder builder = AlgDataTypeFactory.DEFAULT.builder();

    final List<PhysicalColumn> orderedColumns = fields.stream()
            .map( field -> field.unwrap( PhysicalColumn.class ) )
            .sorted( Comparator.comparingInt( column -> column.position ) )
            .collect( Collectors.toList() );

    for ( PhysicalColumn column : orderedColumns ) {
        builder.add( column.id, column.logicalName, column.name, column.getAlgDataType( AlgDataTypeFactory.DEFAULT ) )
                .nullable( column.nullable );
    }

    return AlgDataTypeImpl.proto( builder.build() );
}


/**
 * Returns a short diagnostic representation of this entity containing the
 * physical entity name.
 *
 * <p>NOTE(review): the label still reads "MongoTable" although the class is
 * {@code MongoEntity}; confirm no log consumers depend on the legacy label
 * before renaming it.</p>
 */
@Override
public String toString() {
    return "MongoTable {" + physical.name + "}";
}
Expand Down Expand Up @@ -325,7 +353,6 @@ public Modify<?> toModificationCollection(
}



@Override
public Serializable[] getParameterArray() {
return new Serializable[0];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ private void handleUpdate( Implementor implementor ) {
condImplementor.setStaticRowType( implementor.getStaticRowType() );
((MongoAlg) input).implement( condImplementor );
implementor.filter = condImplementor.filter;
assert condImplementor.getStaticRowType() instanceof MongoRowType;
//assert condImplementor.getStaticRowType() instanceof MongoRowType;
MongoRowType rowType = (MongoRowType) condImplementor.getStaticRowType();
int pos = 0;
BsonDocument doc = new BsonDocument();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,35 +26,30 @@
import org.polypheny.db.algebra.constant.ExplainFormat;
import org.polypheny.db.algebra.constant.ExplainLevel;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.catalog.Catalog;
import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
import org.polypheny.db.languages.mql.MqlCollectionStatement;
import org.polypheny.db.languages.mql.MqlCreateCollection;
import org.polypheny.db.languages.mql.MqlNode;
import org.polypheny.db.languages.mql.MqlQueryParameters;
import org.polypheny.db.languages.mql.parser.MqlParser;
import org.polypheny.db.languages.mql.parser.MqlParser.MqlParserConfig;
import org.polypheny.db.languages.mql2alg.MqlToAlgConverter;
import org.polypheny.db.nodes.Node;
import org.polypheny.db.plan.AlgOptCluster;
import org.polypheny.db.plan.AlgOptUtil;
import org.polypheny.db.processing.AutomaticDdlProcessor;
import org.polypheny.db.processing.Processor;
import org.polypheny.db.processing.QueryContext.ParsedQueryContext;
import org.polypheny.db.rex.RexBuilder;
import org.polypheny.db.tools.AlgBuilder;
import org.polypheny.db.transaction.Lock.LockMode;
import org.polypheny.db.transaction.LockManager;
import org.polypheny.db.transaction.Statement;
import org.polypheny.db.transaction.Transaction;
import org.polypheny.db.transaction.TransactionException;
import org.polypheny.db.transaction.TransactionImpl;
import org.polypheny.db.util.DeadlockException;
import org.polypheny.db.util.Pair;
import org.polypheny.db.util.SourceStringReader;


@Slf4j
public class MqlProcessor extends AutomaticDdlProcessor {
public class MqlProcessor extends Processor {

private static final MqlParserConfig parserConfig;

Expand Down Expand Up @@ -101,42 +96,6 @@ public Pair<Node, AlgDataType> validate( Transaction transaction, Node parsed, b
}


// Decides whether executing this query first requires auto-generated DDL.
// For a collection statement, returns true when no collection with the
// queried name exists yet in the namespace addressed by the parameters;
// every other query kind never triggers DDL generation.
@Override
public boolean needsDdlGeneration( Node query, QueryParameters parameters ) {
    if ( query instanceof MqlCollectionStatement ) {
        // Look up the target namespace, enumerate its document collections,
        // and report true only if none matches the statement's collection name.
        // NOTE(review): mixes Catalog.snapshot() with
        // Catalog.getInstance().getSnapshot() — presumably equivalent accessors;
        // confirm they read the same snapshot instance.
        return Catalog.snapshot()
                .getNamespace( ((MqlQueryParameters) parameters).getNamespaceId() )
                .stream().flatMap( n -> Catalog.getInstance().getSnapshot().doc().getCollections( n.id, null ).stream() )
                .noneMatch( t -> t.name.equals( ((MqlCollectionStatement) query).getCollection() ) );
    }
    return false;
}


// Creates the missing collection referenced by the parsed query before the
// query itself runs. The statement's current transaction is committed so the
// DDL takes effect; failures surface as GenericRuntimeException.
@Override
public void autoGenerateDDL( Statement statement, ParsedQueryContext context ) {
    if ( context.getQueryNode().getEntity() == null ) {
        // No entity name to create a collection for: commit the open
        // transaction to leave a clean state, then fail.
        try {
            statement.getTransaction().commit();
        } catch ( TransactionException e ) {
            // NOTE(review): the original cause 'e' is dropped here — consider
            // passing it to GenericRuntimeException to preserve the stack trace.
            throw new GenericRuntimeException( "There was a problem auto-generating the needed collection." );
        }

        throw new GenericRuntimeException( "No collections is used." );
    }
    // Execute a CREATE COLLECTION for the entity named in the query.
    new MqlCreateCollection(
            ParserPos.sum( Collections.singletonList( context.getQueryNode() ) ),
            context.getQueryNode().getEntity(),
            null ).execute( statement.getPrepareContext(), statement, context );
    // Persist the DDL: commit both the statement's transaction and the catalog.
    try {
        statement.getTransaction().commit();
        Catalog.getInstance().commit();
    } catch ( TransactionException e ) {
        // NOTE(review): cause 'e' is dropped here as well — confirm whether
        // GenericRuntimeException offers a (String, Throwable) constructor.
        throw new GenericRuntimeException( "There was a problem auto-generating the needed collection." );
    }
}


@Override
public AlgRoot translate( Statement statement, ParsedQueryContext context ) {
final StopWatch stopWatch = new StopWatch();
Expand Down

0 comments on commit 0bb567e

Please sign in to comment.