diff --git a/core/src/main/java/org/polypheny/db/adapter/ConnectionMethod.java b/core/src/main/java/org/polypheny/db/adapter/ConnectionMethod.java
index 3842b14628..52a9be5e96 100644
--- a/core/src/main/java/org/polypheny/db/adapter/ConnectionMethod.java
+++ b/core/src/main/java/org/polypheny/db/adapter/ConnectionMethod.java
@@ -19,7 +19,9 @@ public enum ConnectionMethod {
 
     LINK,
-    UPLOAD;
+    UPLOAD,
+    URL;
+
 
     public static ConnectionMethod from( String name ) {
         return ConnectionMethod.valueOf( name.toUpperCase() );
diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java
index 6234046bfd..9933b8c192 100644
--- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java
+++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java
@@ -18,68 +18,26 @@
 import com.google.gson.JsonObject;
 import com.google.gson.JsonSerializer;
-import java.util.List;
 import java.util.Map;
-import lombok.AllArgsConstructor;
+import java.util.Set;
 import lombok.Getter;
 import org.pf4j.ExtensionPoint;
 import org.polypheny.db.catalog.catalogs.AdapterCatalog;
 import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
-import org.polypheny.db.type.PolyType;
+import org.polypheny.db.catalog.logistic.DataModel;
 
 
 @Getter
 public abstract class DataSource<S extends AdapterCatalog> extends Adapter<S> implements ExtensionPoint {
 
+    private final Set<DataModel> supportedDataModels;
     private final boolean dataReadOnly;
 
 
-    protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, final DeployMode mode, boolean dataReadOnly, S catalog ) {
+    protected DataSource( final long adapterId, final String uniqueName, final Map<String, String> settings, final DeployMode mode, boolean dataReadOnly, S catalog, Set<DataModel> supportedModels ) {
         super( adapterId, uniqueName, settings, mode, catalog );
         this.dataReadOnly = dataReadOnly;
-
+        this.supportedDataModels = Set.copyOf( supportedModels );
         informationPage.setLabel( "Sources" );
-    }
-
-
-    public abstract Map<String, List<ExportedColumn>> getExportedColumns();
-
-
-    @AllArgsConstructor
-    public static class ExportedColumn {
-
-        public final String name;
-        public final PolyType type;
-        public final PolyType collectionsType;
-        public final Integer length;
-        public final Integer scale;
-        public final Integer dimension;
-        public final Integer cardinality;
-        public final boolean nullable;
-        public final String physicalSchemaName;
-        public final String physicalTableName;
-        public final String physicalColumnName;
-        public final int physicalPosition;
-        public final boolean primary;
-
-
-        public String getDisplayType() {
-            String typeStr = type.getName();
-            if ( scale != null ) {
-                typeStr += "(" + length + "," + scale + ")";
-            } else if ( length != null ) {
-                typeStr += "(" + length + ")";
-            }
-
-            if ( collectionsType != null ) {
-                typeStr += " " + collectionsType.getName();
-                if ( cardinality != null ) {
-                    typeStr += "(" + dimension + "," + cardinality + ")";
-                } else if ( dimension != null ) {
-                    typeStr += "(" + dimension + ")";
-                }
-            }
-            return typeStr;
-        }
     }
 
 
@@ -104,4 +62,37 @@ private AdapterType getAdapterType() {
         return AdapterType.SOURCE;
     }
 
+
+    public boolean supportsRelational() {
+        return supportedDataModels.contains( DataModel.RELATIONAL );
+    }
+
+
+    public boolean supportsDocument() {
+        return supportedDataModels.contains( DataModel.DOCUMENT );
+    }
+
+
+    public boolean supportsGraph() {
+        return supportedDataModels.contains( DataModel.GRAPH );
+    }
+
+
+    public RelationalDataSource asRelationalDataSource() {
+        // should be overridden by subclasses accordingly
+        throw new IllegalStateException( "This source does not support the relational data model." );
+    }
+
+
+    public DocumentDataSource asDocumentDataSource() {
+        // should be overridden by subclasses accordingly
+        throw new IllegalStateException( "This source does not support the document data model." );
+    }
+
+
+    public DocumentDataSource asGraphDataSource() {
+        // should be overridden by subclasses accordingly
+        // NOTE: declared with DocumentDataSource as a stop-gap return type; no dedicated
+        // graph-source interface exists yet, so every caller currently ends up in this throw.
+        throw new IllegalStateException( "This source does not support the graph data model." );
+    }
+
 }
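Note: the `supports*`/`as*` pair above replaces the removed abstract `getExportedColumns()` as the way to reach model-specific source APIs. A minimal caller-side sketch of the intended usage (the `inspect` helper and its printing are illustrative only, not part of this patch):

    // Hypothetical helper, assuming the types introduced in this patch.
    static void inspect( DataSource<?> source ) {
        if ( source.supportsRelational() ) {
            Map<String, List<ExportedColumn>> tables = source.asRelationalDataSource().getExportedColumns();
            tables.forEach( ( table, columns ) -> System.out.println( table + ": " + columns.size() + " columns" ) );
        }
        if ( source.supportsDocument() ) {
            for ( ExportedDocument doc : source.asDocumentDataSource().getExportedCollection() ) {
                System.out.println( doc.name() );
            }
        }
        // supportsGraph() can return true, but asGraphDataSource() still throws for every adapter.
    }
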
diff --git a/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java
new file mode 100644
index 0000000000..15ea0008af
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/adapter/DocumentDataSource.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter;
+
+import java.util.List;
+import lombok.Getter;
+import org.polypheny.db.catalog.logistic.EntityType;
+
+public interface DocumentDataSource {
+
+    List<ExportedDocument> getExportedCollection();
+
+    record ExportedDocument( String name, boolean isModifiable, EntityType type ) {
+
+    }
+
+}
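Since ExportedDocument is a record, its accessors carry the bare component names — this is why call sites further down in this patch change `exportedColumn.name` to `exportedColumn.name()`. A quick illustration (the values are made up):

    ExportedDocument doc = new ExportedDocument( "orders", false, EntityType.SOURCE );
    doc.name();          // "orders"
    doc.isModifiable();  // false
    doc.type();          // EntityType.SOURCE
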
diff --git a/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java
new file mode 100644
index 0000000000..5ceec1aede
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/adapter/RelationalDataSource.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter;
+
+import java.util.List;
+import java.util.Map;
+import org.polypheny.db.type.PolyType;
+
+public interface RelationalDataSource {
+
+    Map<String, List<ExportedColumn>> getExportedColumns();
+
+    record ExportedColumn( String name, PolyType type, PolyType collectionsType, Integer length, Integer scale, Integer dimension, Integer cardinality, boolean nullable, String physicalSchemaName, String physicalTableName, String physicalColumnName, int physicalPosition, boolean primary ) {
+
+        public String getDisplayType() {
+            String typeStr = type.getName();
+            if ( scale != null ) {
+                typeStr += "(" + length + "," + scale + ")";
+            } else if ( length != null ) {
+                typeStr += "(" + length + ")";
+            }
+
+            if ( collectionsType != null ) {
+                typeStr += " " + collectionsType.getName();
+                if ( cardinality != null ) {
+                    typeStr += "(" + dimension + "," + cardinality + ")";
+                } else if ( dimension != null ) {
+                    typeStr += "(" + dimension + ")";
+                }
+            }
+            return typeStr;
+        }
+
+    }
+
+}
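For reference, getDisplayType() renders display strings as sketched below; both ExportedColumn instances are invented purely to trace the formatting logic above (the exact strings assume PolyType.getName() yields the plain type name):

    // roughly "VARCHAR(255)" — length set, no scale
    new ExportedColumn( "name", PolyType.VARCHAR, null, 255, null, null, null, true, "hr", "emps", "name", 1, false ).getDisplayType();
    // roughly "DECIMAL(10,2) ARRAY(2,3)" — scale plus a collections type with dimension and cardinality
    new ExportedColumn( "m", PolyType.DECIMAL, PolyType.ARRAY, 10, 2, 2, 3, true, "hr", "emps", "m", 2, false ).getDisplayType();
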
diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java
index eb2e145124..2578beff96 100644
--- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java
+++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalCollection.java
@@ -50,4 +50,10 @@ public Expression asExpression() {
         return Expressions.call( Catalog.CATALOG_EXPRESSION, "getCollection", Expressions.constant( id ) );
     }
 
+
+    @Override
+    public String getNamespaceName() {
+        return Catalog.snapshot().getNamespace( namespaceId ).orElseThrow().name;
+    }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
index d2b263dea5..8345a2f6c1 100644
--- a/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
+++ b/core/src/main/java/org/polypheny/db/catalog/snapshot/impl/LogicalDocSnapshotImpl.java
@@ -29,6 +29,7 @@
 import org.polypheny.db.catalog.catalogs.LogicalDocumentCatalog;
 import org.polypheny.db.catalog.entity.logical.LogicalCollection;
 import org.polypheny.db.catalog.entity.logical.LogicalNamespace;
+import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.catalog.logistic.Pattern;
 import org.polypheny.db.catalog.snapshot.LogicalDocSnapshot;
 
@@ -45,7 +46,15 @@ public class LogicalDocSnapshotImpl implements LogicalDocSnapshot {
     public LogicalDocSnapshotImpl( Map<Long, LogicalDocumentCatalog> catalogs ) {
         this.namespaces = ImmutableMap.copyOf( catalogs.values().stream().collect( Collectors.toMap( n -> n.getLogicalNamespace().id, LogicalCatalog::getLogicalNamespace ) ) );
         this.collections = ImmutableMap.copyOf( catalogs.values().stream().flatMap( c -> c.getCollections().values().stream() ).collect( Collectors.toMap( c -> c.id, c -> c ) ) );
-        this.collectionNames = ImmutableMap.copyOf( this.collections.values().stream().collect( Collectors.toMap( c -> c.name, c -> c ) ) );
+        this.collectionNames = ImmutableMap.copyOf( this.collections.values().stream().collect(
+                Collectors.toMap(
+                        c -> c.name,
+                        c -> c,
+                        ( existing, replacement ) -> {
+                            throw new GenericRuntimeException( "A collection of documents called '" + existing.name + "' already exists." );
+                        }
+                ) ) );
         this.namespaceCollections = ImmutableMap.copyOf( catalogs.values().stream().collect( Collectors.toMap( c -> c.getLogicalNamespace().id, c -> List.copyOf( c.getCollections().values() ) ) ) );
     }
diff --git a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java
index 5a1ebdccee..dcdef0b3e8 100644
--- a/core/src/main/java/org/polypheny/db/routing/DmlRouter.java
+++ b/core/src/main/java/org/polypheny/db/routing/DmlRouter.java
@@ -34,7 +34,7 @@ public interface DmlRouter {
     /**
      * Routes DML queries and returns a RelNode.
      */
-    AlgNode routeDml( LogicalRelModify node, Statement statement );
+    AlgNode routeRelationalDml( LogicalRelModify node, Statement statement );
 
     /**
      * Routes conditional executes and directly returns a RelNode.
diff --git a/core/src/main/java/org/polypheny/db/security/SecurityManager.java b/core/src/main/java/org/polypheny/db/security/SecurityManager.java
index 0d2445d7eb..1fbb479d1f 100644
--- a/core/src/main/java/org/polypheny/db/security/SecurityManager.java
+++ b/core/src/main/java/org/polypheny/db/security/SecurityManager.java
@@ -25,8 +25,9 @@
 import javax.annotation.Nullable;
 import lombok.Getter;
 import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
 
-
+@Slf4j
 public class SecurityManager {
 
     private static SecurityManager INSTANCE = null;
@@ -74,15 +75,28 @@ public boolean checkPathAccess( Path path ) {
             dir = dir.getParent();
         }
         AuthStatus status = this.status.get( dir );
-
+        if ( dir.startsWith( "classpath:" ) ) {
+            if ( status != null ) {
+                status.setStep( AuthStep.SUCCESSFUL );
+            }
+            return true;
+        }
+        if ( status == null ) {
+            log.debug( "No auth status available for directory {}", dir );
+            return false;
+        }
         if ( status.step == AuthStep.SUCCESSFUL ) {
             return true;
         }
-        if ( Arrays.stream( Objects.requireNonNull( status.path.toFile().listFiles() ) ).noneMatch( f -> f.getName().equals( "polypheny.access" ) && f.isFile() ) ) {
-            // TODO: if more fine-grained access control is required, add as content of file
+        try {
+            if ( Arrays.stream( Objects.requireNonNull( status.path.toFile().listFiles() ) ).noneMatch( f -> f.getName().equals( "polypheny.access" ) && f.isFile() ) ) {
+                // TODO: if more fine-grained access control is required, add as content of file
+                return false;
+            }
+        } catch ( Exception e ) {
+            log.debug( "Failed to check for polypheny.access as the specified path is not a directory: {}", dir );
             return false;
         }
-        status.setStep( AuthStep.SUCCESSFUL );
         return true;
     }
diff --git a/core/src/main/java/org/polypheny/db/util/Source.java b/core/src/main/java/org/polypheny/db/util/Source.java
index 13117f6ee4..24d933a6c0 100644
--- a/core/src/main/java/org/polypheny/db/util/Source.java
+++ b/core/src/main/java/org/polypheny/db/util/Source.java
@@ -52,6 +52,8 @@ public interface Source {
     String path();
 
+    boolean isFile();
+
     Reader reader() throws IOException;
 
     InputStream openStream() throws IOException;
diff --git a/core/src/main/java/org/polypheny/db/util/Sources.java b/core/src/main/java/org/polypheny/db/util/Sources.java
index e0d3a25084..ba13bb1f50 100644
--- a/core/src/main/java/org/polypheny/db/util/Sources.java
+++ b/core/src/main/java/org/polypheny/db/util/Sources.java
@@ -100,7 +100,7 @@ private static String trimOrNull( String s, String suffix ) {
     }
 
 
-    private static boolean isFile( Source source ) {
+    private static boolean protocolIsFile( Source source ) {
         return source.protocol().equals( "file" );
     }
 
@@ -190,6 +190,12 @@ public String path() {
     }
 
 
+    @Override
+    public boolean isFile() {
+        return file != null;
+    }
+
+
     @Override
     public Reader reader() throws IOException {
         final InputStream is;
@@ -234,7 +240,7 @@ public Source trimOrNull( String suffix ) {
 
     @Override
     public Source append( Source child ) {
-        if ( isFile( child ) ) {
+        if ( protocolIsFile( child ) ) {
             if ( child.file().isAbsolute() ) {
                 return child;
             }
@@ -261,8 +267,8 @@ public Source append( Source child ) {
 
     @Override
     public Source relative( Source parent ) {
-        if ( isFile( parent ) ) {
-            if ( isFile( this ) && file.getPath().startsWith( parent.file().getPath() ) ) {
+        if ( protocolIsFile( parent ) ) {
+            if ( protocolIsFile( this ) && file.getPath().startsWith( parent.file().getPath() ) ) {
                 String rest = file.getPath().substring( parent.file().getPath().length() );
                 if ( rest.startsWith( File.separator ) ) {
                     return Sources.file( null, rest.substring( File.separator.length() ) );
@@ -270,7 +276,7 @@
             }
             return this;
         } else {
-            if ( !isFile( this ) ) {
+            if ( !protocolIsFile( this ) ) {
                 String rest = Sources.trimOrNull( url.toExternalForm(), parent.url().toExternalForm() );
                 if ( rest != null && rest.startsWith( "/" ) ) {
                     return Sources.file( null, rest.substring( 1 ) );
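The rename above keeps two notions apart: the new public Source.isFile() reports whether a concrete java.io.File backs the source, while the private protocolIsFile(...) helper keeps checking the URL protocol, avoiding a name clash with the interface method. Roughly (paths and URL are examples):

    Source local = Sources.of( new File( "data/emps.csv" ) );
    Source remote = Sources.of( new URL( "https://example.org/emps.csv" ) );
    local.isFile();   // true  — a java.io.File backs this source
    remote.isFile();  // false — URL-backed, no file handle
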
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 81f973d537..e5a667ad8a 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -29,6 +29,7 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import java.util.function.BiFunction;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
@@ -37,10 +38,11 @@
 import org.polypheny.db.adapter.Adapter;
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataSource;
-import org.polypheny.db.adapter.DataSource.ExportedColumn;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.adapter.DataStore.IndexMethodModel;
 import org.polypheny.db.adapter.DeployMode;
+import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument;
+import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
 import org.polypheny.db.adapter.index.IndexManager;
 import org.polypheny.db.algebra.AlgCollation;
 import org.polypheny.db.algebra.AlgNode;
@@ -212,10 +214,48 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap
     public void createSource( String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map<String, String> config, DeployMode mode ) {
         uniqueName = uniqueName.toLowerCase();
         DataSource<?> adapter = (DataSource<?>) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );
+        namespace = adapter.getCurrentNamespace() == null ? namespace : adapter.getCurrentNamespace().getId(); // TODO: clean implementation. Sources should either create their own namespace or there should be default namespaces for different models.
+        if ( adapter.supportsRelational() ) {
+            createRelationalSource( adapter, namespace );
+        }
+        if ( adapter.supportsDocument() ) {
+            createDocumentSource( adapter, namespace );
+        }
+        if ( adapter.supportsGraph() ) {
+            // TODO: implement graph source creation
+            throw new IllegalArgumentException( "Adapters with native data model graph are not yet supported!"
); + } + catalog.updateSnapshot(); + } + + + private void createDocumentSource( DataSource adapter, long namespace ) { + List exportedCollections; + try { + exportedCollections = adapter.asDocumentDataSource().getExportedCollection(); + } catch ( Exception e ) { + AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); + throw new GenericRuntimeException( "Could not deploy adapter", e ); + } + + for ( ExportedDocument exportedDocument : exportedCollections ) { + String documentName = getUniqueEntityName( namespace, exportedDocument.name(), ( ns, en ) -> catalog.getSnapshot().doc().getCollection( ns, en ) ); + LogicalCollection logicalCollection = catalog.getLogicalDoc( namespace ).addCollection( documentName, exportedDocument.type(), exportedDocument.isModifiable() ); + AllocationPartition partition = catalog.getAllocDoc( namespace ).addPartition( logicalCollection, PartitionType.NONE, null ); + AllocationPlacement placement = catalog.getAllocDoc( namespace ).addPlacement( logicalCollection, adapter.getAdapterId() ); + AllocationCollection allocationCollection = catalog.getAllocDoc( namespace ).addAllocation( logicalCollection, placement.getId(), partition.getId(), adapter.getAdapterId() ); + + buildDocumentNamespace( namespace, logicalCollection, adapter ); + adapter.createCollection( null, logicalCollection, allocationCollection ); + catalog.updateSnapshot(); + } + } + + private void createRelationalSource( DataSource adapter, long namespace ) { Map> exportedColumns; try { - exportedColumns = adapter.getExportedColumns(); + exportedColumns = adapter.asRelationalDataSource().getExportedColumns(); } catch ( Exception e ) { AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); throw new GenericRuntimeException( "Could not deploy adapter", e ); @@ -223,14 +263,7 @@ public void createSource( String uniqueName, String adapterName, long namespace, // Create table, columns etc. 
for ( Map.Entry> entry : exportedColumns.entrySet() ) { // Make sure the table name is unique - String tableName = entry.getKey(); - if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { - int i = 0; - while ( catalog.getSnapshot().rel().getTable( namespace, tableName + i ).isPresent() ) { - i++; - } - tableName += i; - } + String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); List columns = new ArrayList<>(); @@ -244,16 +277,16 @@ public void createSource( String uniqueName, String adapterName, long namespace, for ( ExportedColumn exportedColumn : entry.getValue() ) { LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( - exportedColumn.name, + exportedColumn.name(), logical.id, colPos++, - exportedColumn.type, - exportedColumn.collectionsType, - exportedColumn.length, - exportedColumn.scale, - exportedColumn.dimension, - exportedColumn.cardinality, - exportedColumn.nullable, + exportedColumn.type(), + exportedColumn.collectionsType(), + exportedColumn.length(), + exportedColumn.scale(), + exportedColumn.dimension(), + exportedColumn.cardinality(), + exportedColumn.nullable(), Collation.getDefaultCollation() ); AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( @@ -262,22 +295,31 @@ public void createSource( String uniqueName, String adapterName, long namespace, column.id, adapter.adapterId, PlacementType.STATIC, - exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder + exportedColumn.physicalPosition() ); // Not a valid partitionGroupID --> placeholder columns.add( column ); aColumns.add( allocationColumn ); } - buildNamespace( Catalog.defaultNamespaceId, logical, adapter ); + buildRelationalNamespace( namespace, logical, adapter ); catalog.attachCommitAction( () -> // we can execute with initial logical and allocation data as this is a source and this will not change adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); catalog.updateSnapshot(); - } - catalog.updateSnapshot(); + } + + private String getUniqueEntityName( Long namespace, String name, BiFunction> retriever ) { + if ( retriever.apply( namespace, name ).isEmpty() ) { + return name; + } + int enumerator = 0; + while ( retriever.apply( namespace, name + enumerator ).isPresent() ) { + enumerator++; + } + return name + enumerator; } @@ -288,14 +330,15 @@ public void dropAdapter( String name, Statement statement ) { LogicalAdapter adapter = catalog.getSnapshot().getAdapter( name ).orElseThrow(); if ( adapter.type == AdapterType.SOURCE ) { for ( AllocationEntity allocation : catalog.getSnapshot().alloc().getEntitiesOnAdapter( adapter.id ).orElse( List.of() ) ) { - // Make sure that there is only one adapter - if ( catalog.getSnapshot().alloc().getFromLogical( allocation.logicalId ).size() != 1 ) { - throw new GenericRuntimeException( "The data source contains entities with more than one placement. This should not happen!" 
); + if ( catalog.getSnapshot().alloc().getFromLogical( allocation.logicalId ).isEmpty() ) { + continue; } if ( allocation.unwrap( AllocationCollection.class ).isPresent() ) { - dropCollection( catalog.getSnapshot().doc().getCollection( allocation.adapterId ).orElseThrow(), statement ); - } else if ( allocation.unwrap( AllocationTable.class ).isPresent() ) { + dropNamespace( catalog.getSnapshot().doc().getCollection( allocation.logicalId ).orElseThrow().getNamespaceName(), true, statement ); + continue; + } + if ( allocation.unwrap( AllocationTable.class ).isPresent() ) { for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( allocation.logicalId ) ) { catalog.getLogicalRel( allocation.namespaceId ).deleteForeignKey( fk.id ); @@ -380,20 +423,14 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam long adapterId = allocation.adapterId; DataSource dataSource = AdapterManager.getInstance().getSource( adapterId ).orElseThrow(); - //String physicalTableName = catalog.getSnapshot().alloc().getPhysicalTable( catalogTable.id, adapterId ).name; - List exportedColumns = dataSource.getExportedColumns().get( table.name ); + List exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( table.name ); // Check if physicalColumnName is valid - ExportedColumn exportedColumn = null; - for ( ExportedColumn ec : exportedColumns ) { - if ( ec.physicalColumnName.equalsIgnoreCase( columnPhysicalName ) ) { - exportedColumn = ec; - } - } - if ( exportedColumn == null ) { - throw new GenericRuntimeException( "Invalid physical column name '%s'", columnPhysicalName ); - } + ExportedColumn exportedColumn = exportedColumns.stream() + .filter( ec -> ec.physicalColumnName().equalsIgnoreCase( columnPhysicalName ) ) + .findAny() + .orElseThrow( () -> new GenericRuntimeException( "Invalid physical column name '%s'", columnPhysicalName ) ); int position = updateAdjacentPositions( table, beforeColumn, afterColumn ); @@ -401,13 +438,13 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam columnLogicalName, table.id, position, - exportedColumn.type, - exportedColumn.collectionsType, - exportedColumn.length, - exportedColumn.scale, - exportedColumn.dimension, - exportedColumn.cardinality, - exportedColumn.nullable, + exportedColumn.type(), + exportedColumn.collectionsType(), + exportedColumn.length(), + exportedColumn.scale(), + exportedColumn.dimension(), + exportedColumn.cardinality(), + exportedColumn.nullable(), Collation.getDefaultCollation() ); @@ -2119,7 +2156,7 @@ private List addAllocationsForPlacement( long namespaceId, Stat columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) ); } - buildNamespace( namespaceId, logical, adapter ); + buildRelationalNamespace( namespaceId, logical, adapter ); List tables = new ArrayList<>(); for ( Long partitionId : partitionIds ) { tables.add( addAllocationTable( namespaceId, statement, logical, placementId, partitionId, adapter, true ) ); @@ -2176,7 +2213,12 @@ private static List sortByPosition( List columns ) } - private void buildNamespace( long namespaceId, LogicalTable logical, Adapter store ) { + private void buildRelationalNamespace( long namespaceId, LogicalTable logical, Adapter store ) { + store.updateNamespace( logical.getNamespaceName(), namespaceId ); + } + + + private void buildDocumentNamespace( long namespaceId, LogicalCollection logical, Adapter store ) { 
store.updateNamespace( logical.getNamespaceName(), namespaceId ); } @@ -2242,8 +2284,7 @@ public void dropCollection( LogicalCollection collection, Statement statement ) List allocations = snapshot.alloc().getFromLogical( collection.id ); for ( AllocationEntity allocation : allocations ) { - manager.getStore( allocation.adapterId ).orElseThrow().dropCollection( statement.getPrepareContext(), allocation.unwrapOrThrow( AllocationCollection.class ) ); - + manager.getAdapter( allocation.adapterId ).orElseThrow().dropCollection( statement.getPrepareContext(), allocation.unwrapOrThrow( AllocationCollection.class ) ); catalog.getAllocDoc( allocation.namespaceId ).removeAllocation( allocation.id ); catalog.getAllocDoc( allocation.namespaceId ).removePlacement( allocation.placementId ); } @@ -2251,7 +2292,6 @@ public void dropCollection( LogicalCollection collection, Statement statement ) catalog.getLogicalDoc( collection.namespaceId ).deleteCollection( collection.id ); catalog.updateSnapshot(); - // Reset plan cache implementation cache & routing cache statement.getQueryProcessor().resetCaches(); } diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index 275e0530dc..b1af71fdff 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -881,7 +881,7 @@ private List route( AlgRoot logicalRoot, Statement statemen } else if ( logicalRoot.getModel() == ModelTrait.DOCUMENT ) { return routeDocument( logicalRoot, queryInformation, dmlRouter ); } else if ( logicalRoot.alg instanceof LogicalRelModify ) { - AlgNode routedDml = dmlRouter.routeDml( (LogicalRelModify) logicalRoot.alg, statement ); + AlgNode routedDml = dmlRouter.routeRelationalDml( (LogicalRelModify) logicalRoot.alg, statement ); return Lists.newArrayList( new ProposedRoutingPlanImpl( routedDml, logicalRoot, queryInformation.getQueryHash() ) ); } else if ( logicalRoot.alg instanceof ConditionalExecute ) { AlgNode routedConditionalExecute = dmlRouter.handleConditionalExecute( logicalRoot.alg, context ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java index fbc03f6e42..a6e40886ff 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/DmlRouterImpl.java @@ -99,7 +99,7 @@ public class DmlRouterImpl extends BaseRouter implements DmlRouter { @Override - public AlgNode routeDml( LogicalRelModify modify, Statement statement ) { + public AlgNode routeRelationalDml( LogicalRelModify modify, Statement statement ) { AlgCluster cluster = modify.getCluster(); if ( modify.entity == null ) { @@ -602,7 +602,7 @@ public AlgNode handleConditionalExecute( AlgNode node, RoutingContext context ) if ( lce.getRight() instanceof LogicalConditionalExecute ) { action = handleConditionalExecute( lce.getRight(), context ); } else if ( lce.getRight() instanceof LogicalRelModify ) { - action = routeDml( (LogicalRelModify) lce.getRight(), context.getStatement() ); + action = routeRelationalDml( (LogicalRelModify) lce.getRight(), context.getStatement() ); } else { throw new IllegalArgumentException(); } @@ -619,7 +619,7 @@ public AlgNode handleConstraintEnforcer( AlgNode alg, RoutingContext context ) { if ( constraint.getLeft() instanceof RelModify ) { return 
LogicalConstraintEnforcer.create(
-                    routeDml( (LogicalRelModify) constraint.getLeft(), context.getStatement() ),
+                    routeRelationalDml( (LogicalRelModify) constraint.getLeft(), context.getStatement() ),
                     builder.build(),
                     constraint.getExceptionClasses(),
                     constraint.getExceptionMessages() );
@@ -640,7 +640,7 @@ public AlgNode handleBatchIterator( AlgNode alg, RoutingContext context ) {
         LogicalBatchIterator iterator = (LogicalBatchIterator) alg;
         AlgNode input;
         if ( iterator.getInput() instanceof RelModify ) {
-            input = routeDml( (LogicalRelModify) iterator.getInput(), context.getStatement() );
+            input = routeRelationalDml( (LogicalRelModify) iterator.getInput(), context.getStatement() );
         } else if ( iterator.getInput() instanceof ConditionalExecute ) {
             input = handleConditionalExecute( iterator.getInput(), context );
         } else if ( iterator.getInput() instanceof ConstraintEnforcer ) {
@@ -659,6 +659,14 @@ public AlgNode routeDocumentDml( LogicalDocumentModify alg, Statement statement,
         LogicalCollection collection = alg.entity.unwrap( LogicalCollection.class ).orElseThrow( () -> new GenericRuntimeException( String.format( "%s is not a collection", alg.entity.name ) ) );
 
+        if ( !collection.modifiable ) {
+            throw switch ( collection.entityType ) {
+                case ENTITY -> new GenericRuntimeException( "Unable to modify a collection marked as read-only!" );
+                case SOURCE -> new GenericRuntimeException( "The collection '%s' is provided by a data source which does not support data modification.", collection.name );
+                case VIEW, MATERIALIZED_VIEW -> new GenericRuntimeException( "Polypheny-DB does not support modifying views." );
+            };
+        }
+
         List<AlgNode> modifies = new ArrayList<>();
 
         List<AllocationEntity> allocs = snapshot.alloc().getFromLogical( collection.id );
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
index 52863c217d..6230e8c9dd 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSchema.java
@@ -42,7 +42,7 @@
 import java.util.Map;
 import lombok.Getter;
 import org.jetbrains.annotations.Nullable;
-import org.polypheny.db.adapter.DataSource.ExportedColumn;
+import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory.Builder;
@@ -90,15 +90,24 @@ public CsvTable createCsvTable( long id, PhysicalTable table, CsvSource csvSourc
         List<Integer> fieldIds = new ArrayList<>();
 
         List<ExportedColumn> columns = csvSource.getExportedColumns().get( table.name );
-
+        if ( columns == null ) {
+            /*
+            TODO: The source determines table names by file name. The DdlManager enumerates duplicates like name0, name1 and so forth.
+            This remains unnoticed by the source; 'getExportedColumns' thus returns the columns under a wrong name. A way must be found to sync the name of the logical entity in the
+            DdlManager with the name in the source. Currently we just fail the creation of sources with duplicate file names.
+            */
+            String unenumeratedEntityName = table.name.replaceAll( "\\d+$", "" );
+            throw new GenericRuntimeException( "A logical relational entity with name '" + unenumeratedEntityName + "' already exists." );
+        }
         for ( PhysicalColumn column : table.getColumns() ) {
             AlgDataType sqlType = sqlType( typeFactory, column.type, column.length, column.scale, null );
-            fieldInfo.add( column.id, column.name, columns.get( column.position ).physicalColumnName, sqlType ).nullable( column.nullable );
+            fieldInfo.add( column.id, column.name, columns.get( column.position ).physicalColumnName(), sqlType ).nullable( column.nullable );
             fieldTypes.add( CsvFieldType.getCsvFieldType( column.type ) );
             fieldIds.add( column.position );
         }
 
-        String csvFileName = columns.get( 0 ).physicalSchemaName;
+        String csvFileName = columns.get( 0 ).physicalSchemaName();
         Source source;
         try {
             source = Sources.of( new URL( directoryUrl, csvFileName ) );
@@ -111,9 +120,6 @@
     }
 
 
-    /**
-     * Creates different subtype of table based on the "flavor" attribute.
-     */
     private CsvTable createTable( long id, Source source, PhysicalTable table, List<CsvFieldType> fieldTypes, int[] fields, CsvSource csvSource ) {
         return switch ( flavor ) {
             case TRANSLATABLE -> new CsvTranslatableTable( id, source, table, fieldTypes, fields, csvSource );
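The TODO above refers to the DdlManager's getUniqueEntityName(...) shown earlier in this patch: when a table name is already taken, the new logical table is registered with an appended counter, which the CSV source never learns about. A sketch of the clash, mirroring that enumeration logic (names invented):

    Set<String> taken = Set.of( "emps" );  // hypothetical: "emps" already exists in the namespace
    String name = "emps";
    int i = 0;
    while ( taken.contains( name + i ) ) {
        i++;
    }
    name += i;  // "emps0" — but the source still keys getExportedColumns() by "emps", so the lookup above yields null
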
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index af190419ae..7a1493cb5b 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@ -35,6 +35,7 @@
 import org.polypheny.db.adapter.ConnectionMethod;
 import org.polypheny.db.adapter.DataSource;
 import org.polypheny.db.adapter.DeployMode;
+import org.polypheny.db.adapter.RelationalDataSource;
 import org.polypheny.db.adapter.RelationalScanDelegate;
 import org.polypheny.db.adapter.annotations.AdapterProperties;
 import org.polypheny.db.adapter.annotations.AdapterSettingDirectory;
@@ -51,6 +52,7 @@
 import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
+import org.polypheny.db.catalog.logistic.DataModel;
 import org.polypheny.db.information.InformationGroup;
 import org.polypheny.db.information.InformationTable;
 import org.polypheny.db.prepare.Context;
@@ -72,7 +74,7 @@
 @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://hr", name = "directoryName", description = "You can select a path to a folder or specific .csv or .csv.gz files.", position = 2)
 @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 3, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.")
-public class CsvSource extends DataSource<RelAdapterCatalog> {
+public class CsvSource extends DataSource<RelAdapterCatalog> implements RelationalDataSource {
 
     private static final Logger log = LoggerFactory.getLogger( CsvSource.class );
     @Delegate(excludes = Excludes.class)
@@ -87,7 +89,7 @@ public class CsvSource extends DataSource<RelAdapterCatalog> {
 
     public CsvSource( final long storeId, final String uniqueName, final Map<String, String> settings, final DeployMode mode ) {
-        super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) );
+        super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) );
         this.connectionMethod = settings.containsKey( "method" ) ?
ConnectionMethod.from( settings.get( "method" ).toUpperCase() ) : ConnectionMethod.UPLOAD; @@ -338,7 +340,7 @@ protected void reloadSettings( List updatedSettings ) { private void addInformationExportedColumns() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { - InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName ); + InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName() ); informationGroups.add( group ); InformationTable table = new InformationTable( @@ -346,12 +348,12 @@ private void addInformationExportedColumns() { Arrays.asList( "Position", "Column Name", "Type", "Nullable", "Filename", "Primary" ) ); for ( ExportedColumn exportedColumn : entry.getValue() ) { table.addRow( - exportedColumn.physicalPosition, - exportedColumn.name, + exportedColumn.physicalPosition(), + exportedColumn.name(), exportedColumn.getDisplayType(), - exportedColumn.nullable ? "✔" : "", - exportedColumn.physicalSchemaName, - exportedColumn.primary ? "✔" : "" + exportedColumn.nullable() ? "✔" : "", + exportedColumn.physicalSchemaName(), + exportedColumn.primary() ? "✔" : "" ); } informationElements.add( table ); @@ -372,6 +374,12 @@ public void renameLogicalColumn( long id, String newColumnName ) { } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 5f355f985f..18ea5093f7 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; import lombok.experimental.Delegate; @@ -31,6 +32,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingBoolean; @@ -42,6 +44,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.plugins.PluginContext; @@ -91,7 +94,7 @@ public void stop() { @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1) @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true) @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true) - public static class EthereumDataSource extends DataSource { + 
public static class EthereumDataSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -105,7 +108,7 @@ public static class EthereumDataSource extends DataSource { public EthereumDataSource( final long storeId, final String uniqueName, final Map settings, DeployMode mode ) { - super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) ); setClientURL( settings.get( "ClientUrl" ) ); this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); @@ -260,17 +263,17 @@ protected void createInformationPage() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { InformationGroup group = new InformationGroup( informationPage, - entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName ); + entry.getValue().get( 0 ).physicalSchemaName() + "." + entry.getValue().get( 0 ).physicalTableName() ); InformationTable table = new InformationTable( group, Arrays.asList( "Position", "Column Name", "Type", "Primary" ) ); for ( ExportedColumn exportedColumn : entry.getValue() ) { table.addRow( - exportedColumn.physicalPosition, - exportedColumn.name, + exportedColumn.physicalPosition(), + exportedColumn.name(), exportedColumn.getDisplayType(), - exportedColumn.primary ? "✔" : "" + exportedColumn.primary() ? "✔" : "" ); } informationElements.add( table ); @@ -291,6 +294,12 @@ public void renameLogicalColumn( long id, String newColumnName ) { adapterCatalog.fields.values().stream().filter( c -> c.id == id ).forEach( c -> updateNativePhysical( c.allocId ) ); } + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 9b64d91dc9..0163bfd9ca 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -41,6 +41,7 @@ import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingDirectory; @@ -56,6 +57,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; @@ -76,7 +78,7 @@ @AdapterSettingString(name = "sheetName", description = "default to read the first sheet", defaultValue = "", required = false) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 2, description = "Which length (number of characters including whitespace) should be used for the varchar columns. 
Make sure this is equal or larger than the longest string in any of the columns.") -public class ExcelSource extends DataSource { +public class ExcelSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -90,7 +92,7 @@ public class ExcelSource extends DataSource { public ExcelSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { - super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) ); this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD; // Validate maxStringLength setting @@ -347,7 +349,7 @@ public Map> getExportedColumns() { private void addInformationExportedColumns() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { - InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName ); + InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName() ); informationGroups.add( group ); InformationTable table = new InformationTable( @@ -355,12 +357,12 @@ private void addInformationExportedColumns() { Arrays.asList( "Position", "Column Name", "Type", "Nullable", "Filename", "Primary" ) ); for ( ExportedColumn exportedColumn : entry.getValue() ) { table.addRow( - exportedColumn.physicalPosition, - exportedColumn.name, + exportedColumn.physicalPosition(), + exportedColumn.name(), exportedColumn.getDisplayType(), - exportedColumn.nullable ? "✔" : "", - exportedColumn.physicalSchemaName, - exportedColumn.primary ? "✔" : "" + exportedColumn.nullable() ? "✔" : "", + exportedColumn.physicalSchemaName(), + exportedColumn.primary() ? 
"✔" : "" ); } informationElements.add( table ); @@ -368,6 +370,12 @@ private void addInformationExportedColumns() { } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java index 14a06c15f5..a8633dd4b2 100644 --- a/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java +++ b/plugins/file-adapter/src/main/java/org/polypheny/db/adapter/file/source/Qfs.java @@ -30,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import java.util.StringJoiner; import java.util.stream.Collectors; import lombok.Getter; @@ -39,6 +40,7 @@ import org.jetbrains.annotations.NotNull; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingString; @@ -49,6 +51,7 @@ import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationTable; @@ -69,7 +72,7 @@ usedModes = DeployMode.EMBEDDED, defaultMode = DeployMode.EMBEDDED) @AdapterSettingString(name = "rootDir", defaultValue = "") -public class Qfs extends DataSource { +public class Qfs extends DataSource implements RelationalDataSource { @Delegate(excludes = Exclude.class) private final RelationalScanDelegate delegate; @@ -82,7 +85,7 @@ public class Qfs extends DataSource { public Qfs( long adapterId, String uniqueName, Map settings, DeployMode mode ) { - super( adapterId, uniqueName, settings, mode, true, new RelAdapterCatalog( adapterId ) ); + super( adapterId, uniqueName, settings, mode, true, new RelAdapterCatalog( adapterId ), Set.of( DataModel.RELATIONAL ) ); init( settings ); registerInformationPage( uniqueName ); this.delegate = new RelationalScanDelegate( this, adapterCatalog ); @@ -224,7 +227,6 @@ private static String getString( File whitelist ) { } - @Override public Map> getExportedColumns() { //name, extension, path, mime, canExecute, canRead, canWrite, size, lastModified String physSchemaName = getUniqueName(); @@ -312,7 +314,7 @@ protected void registerInformationPage( String uniqueName ) { int i = 2; for ( Map.Entry> entry : getExportedColumns().entrySet() ) { - InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalTableName ).setOrder( i++ ); + InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalTableName() ).setOrder( i++ ); im.addGroup( group ); informationGroups.add( group ); @@ -330,17 +332,23 @@ private static InformationTable getInformationTable( Entry { +public class GoogleSheetSource extends DataSource implements RelationalDataSource { @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -110,7 +113,7 @@ public class GoogleSheetSource extends DataSource { public GoogleSheetSource( 
final long storeId, final String uniqueName, final Map settings, DeployMode mode ) { - super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) ); this.clientId = getSettingOrFail( "oAuth-Client-ID", settings ); this.clientKey = getSettingOrFail( "oAuth-Client-Key", settings ); @@ -200,19 +203,19 @@ protected void createInformationPage() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { InformationGroup group = new InformationGroup( informationPage, - entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName ); + entry.getValue().get( 0 ).physicalSchemaName() + "." + entry.getValue().get( 0 ).physicalTableName() ); InformationTable table = new InformationTable( group, Arrays.asList( "Position", "Column Name", "Type", "Nullable", "Filename", "Primary" ) ); for ( ExportedColumn exportedColumn : entry.getValue() ) { table.addRow( - exportedColumn.physicalPosition, - exportedColumn.name, + exportedColumn.physicalPosition(), + exportedColumn.name(), exportedColumn.getDisplayType(), - exportedColumn.nullable ? "✔" : "", - exportedColumn.physicalSchemaName, - exportedColumn.primary ? "✔" : "" + exportedColumn.nullable() ? "✔" : "", + exportedColumn.physicalSchemaName(), + exportedColumn.primary() ? "✔" : "" ); } informationElements.add( table ); @@ -381,6 +384,12 @@ public void rollback( PolyXid xid ) { } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 47f361c263..9d74860284 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -25,12 +25,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.apache.commons.dbcp2.BasicDataSource; import org.pf4j.ExtensionPoint; import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.RelationalScanDelegate; import org.polypheny.db.adapter.jdbc.JdbcSchema; import org.polypheny.db.adapter.jdbc.JdbcUtils; @@ -43,6 +45,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; @@ -53,7 +56,7 @@ @Slf4j -public abstract class AbstractJdbcSource extends DataSource implements ExtensionPoint { +public abstract class AbstractJdbcSource extends DataSource implements RelationalDataSource, ExtensionPoint { @Delegate(excludes = Exclude.class) private final RelationalScanDelegate delegate; @@ -72,7 +75,7 @@ public AbstractJdbcSource( final String diverClass, final SqlDialect dialect, final boolean readOnly ) { 
- super( storeId, uniqueName, settings, mode, readOnly, new RelAdapterCatalog( storeId ) ); + super( storeId, uniqueName, settings, mode, readOnly, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) ); this.connectionFactory = createConnectionFactory( settings, dialect, diverClass ); this.dialect = dialect; // Register the JDBC Pool Size as information in the information manager and enable it @@ -204,7 +207,6 @@ public void rollback( PolyXid xid ) { protected abstract boolean requiresSchema(); - @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); diff --git a/plugins/json-adapter/build.gradle b/plugins/json-adapter/build.gradle new file mode 100644 index 0000000000..680d708322 --- /dev/null +++ b/plugins/json-adapter/build.gradle @@ -0,0 +1,74 @@ +group "org.polypheny" + + +dependencies { + compileOnly project(":core") + compileOnly project(":dbms") + // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: jackson_core_version + implementation group: "commons-io", name: "commons-io", version: commons_io_version // Apache 2.0 + + + // --- Test Compile --- + testImplementation project(path: ":dbms", configuration: "test") + testImplementation project(path: ":dbms") + testImplementation project(path: ":core", configuration: "tests") + testImplementation project(path: ":core") + testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: junit_jupiter_version +} + + +sourceSets { + main { + java { + srcDirs = ["src/main/java"] + } + resources { + srcDirs = ["src/main/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/classes") + } + test { + java { + srcDirs = ["src/test/java"] + destinationDirectory.set(file(project.buildDir.absolutePath + "/test-classes")) + } + resources { + srcDirs = ["src/test/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes") + } +} + +compileJava { + dependsOn(":dbms:processResources") + dependsOn(":config:processResources") + dependsOn(":core:processResources") + dependsOn(":information:processResources") +} + +delombok { + dependsOn(":dbms:processResources") +} + +test.dependsOn(":dbms:shadowJar") + +/** + * JARs + */ +jar { + manifest { + attributes "Manifest-Version": "1.0" + attributes "Copyright": "The Polypheny Project (polypheny.org)" + attributes "Version": "$project.version" + } +} +java { + withJavadocJar() + withSourcesJar() +} + +licensee { + allow('MIT') + allow('Apache-2.0') +} diff --git a/plugins/json-adapter/gradle.properties b/plugins/json-adapter/gradle.properties new file mode 100644 index 0000000000..2a4b47ae8a --- /dev/null +++ b/plugins/json-adapter/gradle.properties @@ -0,0 +1,27 @@ +# +# Copyright 2019-2023 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +pluginVersion = 0.0.1 + +pluginId = json-adapter +pluginClass = org.polypheny.db.adapter.json.JsonPlugin +pluginDependencies = +pluginProvider = The Polypheny Project +pluginUrlPath = +pluginCategories = source +pluginPolyDependencies = +pluginIsSystemComponent = false +pluginIsUiVisible = true diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonCollection.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonCollection.java new file mode 100644 index 0000000000..fd6cc05312 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonCollection.java @@ -0,0 +1,85 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.json; + +import java.net.URL; +import org.apache.calcite.linq4j.AbstractEnumerable; +import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.Enumerator; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.catalogs.DocAdapterCatalog; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.schema.types.ScannableEntity; +import org.polypheny.db.schema.types.TranslatableEntity; +import org.polypheny.db.type.entity.PolyValue; + +final class JsonCollection extends PhysicalCollection implements ScannableEntity, TranslatableEntity { + + private final URL url; + private final Adapter adapter; + + + JsonCollection( URL url, PhysicalEntity collection, long allocationId, JsonNamespace namespace, Adapter adapter ) { + super( collection.getId(), allocationId, collection.getLogicalId(), namespace.getId(), collection.getName(), namespace.getName(), adapter.getAdapterId() ); + this.url = url; + this.adapter = adapter; + } + + + @Override + public Expression asExpression() { + Expression argExp = Expressions.constant( this.id ); + return Expressions.convert_( Expressions.call( Expressions.call( this.adapter.asExpression(), "getAdapterCatalog" ), "getPhysical", argExp ), JsonCollection.class ); + } + + + @Override + public Enumerable scan( DataContext dataContext ) { + dataContext.getStatement().getTransaction().registerInvolvedAdapter( adapter ); + return new AbstractEnumerable<>() { + @Override + public Enumerator enumerator() { + return new JsonEnumerator( url ); + } + }; + } + + + public AlgNode toAlg( AlgCluster cluster, AlgTraitSet traitSet ) { + return new JsonScan( cluster, this ); + } + + + public Enumerable project( final DataContext dataContext ) { + dataContext.getStatement().getTransaction().registerInvolvedAdapter( adapter ); + return new AbstractEnumerable<>() { + @Override + public 
Enumerator enumerator() { + return new JsonEnumerator( url ); + } + }; + } + +} + diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonEnumerator.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonEnumerator.java new file mode 100644 index 0000000000..ef7b168429 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonEnumerator.java @@ -0,0 +1,105 @@ +package org.polypheny.db.adapter.json; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.net.URL; +import org.apache.calcite.linq4j.Enumerator; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.type.entity.PolyValue; + +final class JsonEnumerator implements Enumerator { + + private final static ObjectMapper MAPPER = new ObjectMapper(); + private final static JsonToPolyConverter CONVERTER = new JsonToPolyConverter(); + + private final URL url; + private JsonParser parser; + private PolyValue[] current; + private boolean isCollection; + + + JsonEnumerator( URL url ) { + this.url = url; + } + + + private void initializeParser() throws IOException { + if ( this.parser == null ) { + this.parser = new JsonFactory().createParser( url.openStream() ); + JsonToken token = parser.nextToken(); + isCollection = (token == JsonToken.START_ARRAY); + if ( !isCollection && token != JsonToken.START_OBJECT ) { + throw new IllegalArgumentException( "Invalid JSON file format. Expected an array or an object at the top level." ); + } + } + } + + + private JsonNode getNextNode() throws IOException { + if ( parser == null || parser.isClosed() ) { + return null; + } + + if ( isCollection ) { + while ( parser.nextToken() != JsonToken.END_ARRAY ) { + if ( parser.currentToken() != JsonToken.START_OBJECT ) { + continue; + } + return MAPPER.readTree( parser ); + } + } + + JsonNode node = null; + if ( parser.currentToken() == JsonToken.START_OBJECT ) { + node = MAPPER.readTree( parser ); + isCollection = false; + } + return node; + } + + + @Override + public PolyValue[] current() { + return current; + } + + + @Override + public boolean moveNext() { + try { + if ( parser == null ) { + initializeParser(); + } + JsonNode node = getNextNode(); + current = node == null ? 
null : new PolyValue[]{ CONVERTER.nodeToPolyDocument( node ) }; + return node != null; + } catch ( IOException e ) { + throw new GenericRuntimeException( "Error reading JSON: " + e.getMessage(), e ); + } + } + + + @Override + public void reset() { + close(); + this.parser = null; + current = null; + } + + + @Override + public void close() { + try { + if ( parser != null ) { + parser.close(); + } + } catch ( IOException e ) { + throw new GenericRuntimeException( "Failed to close JSON parser: " + e.getMessage(), e ); + } + } + +} diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java new file mode 100644 index 0000000000..bfdf6d987f --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonMetaRetriever.java @@ -0,0 +1,125 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.json; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.NoSuchFileException; +import java.util.Arrays; +import java.util.LinkedList; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.EntityType; +import org.polypheny.db.util.Source; +import org.polypheny.db.util.Sources; + +final class JsonMetaRetriever { + + static List getDocuments( URL jsonFiles ) throws IOException { + List exportedDocuments = new LinkedList<>(); + Set fileNames = getFileNames( jsonFiles ); + ObjectMapper objectMapper = new ObjectMapper(); + JsonFactory jsonFactory = new JsonFactory( objectMapper ); + + for ( String fileName : fileNames ) { + URL jsonFile = new URL( jsonFiles, fileName ); + try ( InputStream inputStream = jsonFile.openStream(); + JsonParser jsonParser = jsonFactory.createParser( inputStream ) ) { + String entityName = deriveEntityName( jsonFile.getFile() ); + JsonToken token = jsonParser.nextToken(); + if ( token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT ) { + exportedDocuments.add( new ExportedDocument( entityName, false, EntityType.SOURCE ) ); + } else { + throw new GenericRuntimeException( "JSON file does not contain a valid top-level structure (neither an object nor an array)" ); + } + } + } + return exportedDocuments; + } + + + static URL findDocumentUrl( URL jsonFiles, String name ) throws MalformedURLException, NoSuchFileException { + String[] extensions = { ".json", ".json.gz" }; + String path = jsonFiles.getPath(); + + // handle 
single file
+        for ( String ext : extensions ) {
+            if ( path.endsWith( name + ext ) ) {
+                return jsonFiles;
+            }
+        }
+
+        // handle directory
+        Set<String> fileNames = getFileNames( jsonFiles );
+        for ( String file : fileNames ) {
+            for ( String ext : extensions ) {
+                if ( file.equals( name + ext ) ) {
+                    return new URL( jsonFiles, file );
+                }
+            }
+        }
+
+        throw new NoSuchFileException( "No JSON file(s) found under the URL '" + jsonFiles + "'" );
+    }
+
+
+    private static Set<String> getFileNames( URL jsonFiles ) throws NoSuchFileException {
+        Source source = Sources.of( jsonFiles );
+        if ( source.isFile() ) {
+            File file = source.file();
+            if ( file.isFile() ) {
+                // url is a single file
+                return Set.of( file.getName() );
+            }
+
+            // url is a directory; list both plain and gzipped JSON files, matching the extensions accepted by findDocumentUrl
+            File[] files = file.listFiles( ( d, name ) -> name.endsWith( ".json" ) || name.endsWith( ".json.gz" ) );
+            if ( files == null || files.length == 0 ) {
+                throw new NoSuchFileException( "No .json or .json.gz files were found." );
+            }
+            return Arrays.stream( files )
+                    .map( File::getName )
+                    .collect( Collectors.toSet() );
+        }
+        // url is a web source
+        String filePath = jsonFiles.getPath();
+        return Set.of( filePath.substring( filePath.lastIndexOf( '/' ) + 1 ) );
+    }
+
+
+    private static String deriveEntityName( String fileName ) {
+        fileName = fileName.replaceAll( "/+$", "" ); // remove trailing "/"
+        return fileName
+                .substring( fileName.lastIndexOf( '/' ) + 1 ) // extract file name after last "/"
+                .toLowerCase()
+                .replace( ".json.gz", "" )
+                .replace( ".json", "" )
+                .replaceAll( "[^a-z0-9_]+", "" ) // remove invalid characters
+                .trim();
+    }
+
+}
diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonNamespace.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonNamespace.java
new file mode 100644
index 0000000000..81863b4248
--- /dev/null
+++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonNamespace.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.adapter.json; + +import java.util.Optional; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.plan.Convention; +import org.polypheny.db.schema.Namespace; + +@Getter +final class JsonNamespace extends Namespace { + + private final String name; + + + public JsonNamespace( String name, long id, long adapterId ) { + super( id, adapterId ); + this.name = name; + } + + + @Override + protected @Nullable Convention getConvention() { + return null; + } + + + @Override + public @NotNull Optional unwrap( Class aClass ) { + return super.unwrap( aClass ); + } + + + @Override + public @NotNull C unwrapOrThrow( Class aClass ) { + return super.unwrapOrThrow( aClass ); + } + +} diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonPlugin.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonPlugin.java new file mode 100644 index 0000000000..2d72c235b5 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonPlugin.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.json; + +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; + +public class JsonPlugin extends PolyPlugin { + + private long id; + + + /** + * Constructor to be used by plugin manager for plugin instantiation. + * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. + */ + + public JsonPlugin( PluginContext context ) { + super( context ); + } + + + @Override + public void afterCatalogInit() { + this.id = AdapterManager.addAdapterTemplate( JsonSource.class, "JSON", JsonSource::new ); + } + + + @Override + public void stop() { + AdapterManager.removeAdapterTemplate( id ); + } + +} diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonProjectScanRule.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonProjectScanRule.java new file mode 100644 index 0000000000..74b32a8105 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonProjectScanRule.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.json; + +import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptRuleCall; +import org.polypheny.db.tools.AlgBuilderFactory; + +final class JsonProjectScanRule extends AlgOptRule { + + static final JsonProjectScanRule INSTANCE = new JsonProjectScanRule( AlgFactories.LOGICAL_BUILDER ); + + + private JsonProjectScanRule( AlgBuilderFactory algBuilderFactory ) { + super( + operand( LogicalDocumentScan.class, none() ), + algBuilderFactory, + "JsonProjectScanRule" + ); + } + + + @Override + public void onMatch( AlgOptRuleCall call ) { + final LogicalDocumentScan scan = call.alg( 0 ); + call.transformTo( new JsonScan( scan.getCluster(), scan.getEntity().unwrapOrThrow( JsonCollection.class ) ) ); + } + +} diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonScan.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonScan.java new file mode 100644 index 0000000000..3fdaf82dfb --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonScan.java @@ -0,0 +1,93 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.json; + +import java.util.List; +import lombok.Getter; +import org.apache.calcite.linq4j.tree.Blocks; +import org.apache.calcite.linq4j.tree.Expressions; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgWriter; +import org.polypheny.db.algebra.core.document.DocumentScan; +import org.polypheny.db.algebra.enumerable.EnumerableAlg; +import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; +import org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.enumerable.PhysType; +import org.polypheny.db.algebra.enumerable.PhysTypeImpl; +import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgPlanner; +import org.polypheny.db.plan.AlgTraitSet; + +public final class JsonScan extends DocumentScan implements EnumerableAlg { + + @Getter + private final JsonCollection collection; + + + JsonScan( AlgCluster cluster, @NotNull JsonCollection collection ) { + super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), collection ); + this.collection = collection; + } + + + @Override + public AlgNode copy( AlgTraitSet traitSet, List inputs ) { + assert inputs.isEmpty(); + return new JsonScan( getCluster(), collection ); + } + + + @Override + public AlgWriter explainTerms( AlgWriter pw ) { + return super.explainTerms( pw ); + } + + + @Override + public AlgDataType deriveRowType() { + final List fieldList = entity.getTupleType().getFields(); + return getCluster().getTypeFactory().builder().add( fieldList.get( 0 ) ).build(); + } + + + @Override + public void register( @NotNull AlgPlanner planner ) { + planner.addRule( JsonProjectScanRule.INSTANCE ); + } + + + @Override + public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { + // copied over from the csv project scan rule + return super.computeSelfCost( planner, mq ).multiplyBy( (3D) / ((double) entity.getTupleType().getFieldCount() + 2D) ); + } + + + @Override + public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { + PhysType physType = PhysTypeImpl.of( implementor.getTypeFactory(), getTupleType(), pref.preferArray() ); + + return implementor.result( physType, Blocks.toBlock( Expressions.call( entity.asExpression( JsonCollection.class ), "project", implementor.getRootExpression() ) ) ); + } + +} diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java new file mode 100644 index 0000000000..c90f510c05 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java @@ -0,0 +1,292 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.json; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.NoSuchFileException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.experimental.Delegate; +import org.pf4j.Extension; +import org.polypheny.db.adapter.ConnectionMethod; +import org.polypheny.db.adapter.DataSource; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.DocumentScanDelegate; +import org.polypheny.db.adapter.Scannable; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingList; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.catalogs.AdapterCatalog; +import org.polypheny.db.catalog.catalogs.DocAdapterCatalog; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.transaction.PolyXid; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Extension +@AdapterProperties( + name = "JSON", + description = "An adapter for querying JSON files. A single JSON file or a directory containing multiple JSON files can be specified by path. 
Currently, this adapter only supports read operations.",
+        usedModes = DeployMode.EMBEDDED,
+        defaultMode = DeployMode.EMBEDDED)
+@AdapterSettingList(name = "method", options = { "link", "url" }, defaultValue = "link", description = "Whether a link to the local filesystem is used (sufficient permissions are required) or the file(s) are fetched from a URL.", position = 1)
+//@AdapterSettingDirectory(subOf = "method_upload", name = "directory", defaultValue = "classpath://articles.json", description = "Path to the JSON file(s) to be integrated as this source.", position = 2)
+@AdapterSettingString(subOf = "method_link", defaultValue = "classpath://articles.json", name = "directoryName", description = "Path to the JSON file(s) to be integrated as this source.", position = 2)
+@AdapterSettingString(subOf = "method_url", defaultValue = "http://localhost/articles.json", name = "url", description = "URL to the JSON file(s) to be integrated as this source.", position = 2)
+public class JsonSource extends DataSource<DocAdapterCatalog> implements DocumentDataSource, Scannable {
+
+    private static final Logger log = LoggerFactory.getLogger( JsonSource.class );
+    @Delegate(excludes = Excludes.class)
+    private final DocumentScanDelegate delegate;
+    private JsonNamespace namespace;
+    private final ConnectionMethod connectionMethod;
+    private URL jsonFiles;
+
+
+    public JsonSource( final long storeId, final String uniqueName, final Map<String, String> settings, DeployMode mode ) {
+        super( storeId, uniqueName, settings, mode, true, new DocAdapterCatalog( storeId ), Set.of( DataModel.DOCUMENT ) );
+        // ConnectionMethod.from() normalizes the case itself; fall back to LINK, as the upload variant is not yet exposed (see the commented-out setting above)
+        this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.LINK;
+        this.jsonFiles = getJsonFilesUrl( settings );
+        this.delegate = new DocumentScanDelegate( this, getAdapterCatalog() );
+        long namespaceId = Catalog.getInstance().createNamespace( uniqueName, DataModel.DOCUMENT, true );
+        this.namespace = new JsonNamespace( uniqueName, namespaceId, getAdapterId() );
+    }
+
+
+    @Override
+    protected void reloadSettings( List<String> updatedSettings ) {
+        // the file location can arrive under a different key depending on the deployment method
+        if ( updatedSettings.contains( "directory" ) || updatedSettings.contains( "directoryName" ) || updatedSettings.contains( "url" ) ) {
+            this.jsonFiles = getJsonFilesUrl( settings );
+        }
+    }
+
+
+    private URL getJsonFilesUrl( final Map<String, String> settings ) {
+        return switch ( connectionMethod ) {
+            case LINK -> {
+                String files = settings.get( "directoryName" );
+                if ( files.startsWith( "classpath://" ) ) {
+                    yield this.getClass().getClassLoader().getResource( files.replace( "classpath://", "" ) );
+                }
+                try {
+                    yield new File( files ).toURI().toURL();
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+            case UPLOAD -> {
+                String files = settings.get( "directory" );
+                if ( files.startsWith( "classpath://" ) ) {
+                    yield this.getClass().getClassLoader().getResource( files.replace( "classpath://", "" ) + "/" );
+                }
+                try {
+                    yield new File( files ).toURI().toURL();
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+            case URL -> {
+                String files = settings.get( "url" );
+                try {
+                    yield new URL( files );
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+        };
+    }
+
+
+    @Override
+    public void updateNamespace( String name, long id ) {
+        namespace = new JsonNamespace( name, id, adapterId );
+    }
+
+
+    @Override
+    public Namespace getCurrentNamespace() {
+        return namespace;
+    }
+
+
+    @Override
+    public void shutdown() {
+        removeInformationPage();
+    }
+
+
+    @Override
+    public List<ExportedDocument> getExportedCollection() {
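+        // Scans the configured location (single file, directory, or remote URL)
+        // and reports one read-only ExportedDocument per JSON file found there.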
+        try {
+            return JsonMetaRetriever.getDocuments( jsonFiles );
+        } catch ( IOException e ) {
+            throw new GenericRuntimeException( "Failed to retrieve documents from JSON file.", e );
+        }
+    }
+
+
+    @Override
+    public AdapterCatalog getCatalog() {
+        return adapterCatalog;
+    }
+
+
+    @Override
+    public void restoreCollection( AllocationCollection allocation, List<PhysicalEntity> entities, Context context ) {
+        PhysicalEntity collection = entities.get( 0 );
+        updateNamespace( collection.getNamespaceName(), collection.getNamespaceId() );
+        try {
+            PhysicalCollection physicalCollection = new JsonCollection( JsonMetaRetriever.findDocumentUrl( jsonFiles, collection.getName() ), collection, allocation.getId(), namespace, this );
+            adapterCatalog.addPhysical( allocation, physicalCollection );
+        } catch ( MalformedURLException | NoSuchFileException e ) {
+            throw new GenericRuntimeException( e );
+        }
+    }
+
+
+    @Override
+    public List<PhysicalEntity> createCollection( Context context, LogicalCollection logical, AllocationCollection allocation ) {
+        PhysicalCollection collection = adapterCatalog.createCollection(
+                logical.getNamespaceName(),
+                logical.getName(),
+                logical,
+                allocation
+        );
+        try {
+            PhysicalCollection physicalCollection = new JsonCollection( JsonMetaRetriever.findDocumentUrl( jsonFiles, collection.getName() ), collection, allocation.getId(), namespace, this );
+            adapterCatalog.replacePhysical( physicalCollection );
+            return List.of( physicalCollection );
+        } catch ( MalformedURLException | NoSuchFileException e ) {
+            throw new GenericRuntimeException( e );
+        }
+    }
+
+
+    @Override
+    public void dropCollection( Context context, AllocationCollection allocation ) {
+        // nothing to do: this source is read-only and does not own the underlying files
+    }
+
+
+    @Override
+    public void renameLogicalColumn( long id, String newColumnName ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method renameLogicalColumn()." );
+    }
+
+
+    @Override
+    public void truncate( Context context, long allocId ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method truncate()." );
+    }
+
+
+    @Override
+    public boolean prepare( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method prepare()." );
+        return true;
+    }
+
+
+    @Override
+    public void commit( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method commit()." );
+    }
+
+
+    @Override
+    public void rollback( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method rollback()." );
+    }
+
+
+    @Override
+    public void dropTable( Context context, long allocId ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method dropTable()." );
+    }
+
+
+    @Override
+    public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method createTable()." );
+        return null;
+    }
+
+
+    @Override
+    public void restoreTable( AllocationTable alloc, List<PhysicalEntity> entities, Context context ) {
+        log.debug( "NOT SUPPORTED: JSON source does not support method restoreTable()."
); + } + + + @Override + public List createGraph( Context context, LogicalGraph logical, AllocationGraph allocation ) { + log.debug( "NOT SUPPORTED: JSON source does not support method createGraph()" ); + return null; + } + + + @Override + public void dropGraph( Context context, AllocationGraph allocation ) { + log.debug( "NOT SUPPORTED: JSON source does not support method dropGraph()" ); + } + + + @Override + public void restoreGraph( AllocationGraph alloc, List entities, Context context ) { + log.debug( "NOT SUPPORTED: JSON source does not support method restoreGraph()." ); + } + + + @Override + public DocumentDataSource asDocumentDataSource() { + return this; + } + + + private interface Excludes { + + void refreshCollection( long allocId ); + + void createCollection( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ); + + void restoreCollection( AllocationTable alloc, List entities ); + + } + +} + + diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonToPolyConverter.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonToPolyConverter.java new file mode 100644 index 0000000000..68e75699f1 --- /dev/null +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonToPolyConverter.java @@ -0,0 +1,79 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.json; + +import com.fasterxml.jackson.databind.JsonNode; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.StreamSupport; +import org.polypheny.db.type.entity.PolyBoolean; +import org.polypheny.db.type.entity.PolyList; +import org.polypheny.db.type.entity.PolyNull; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.category.PolyNumber; +import org.polypheny.db.type.entity.document.PolyDocument; +import org.polypheny.db.type.entity.numerical.PolyDouble; +import org.polypheny.db.type.entity.numerical.PolyLong; +import org.polypheny.db.type.entity.relational.PolyMap; + +public final class JsonToPolyConverter { + + public PolyDocument nodeToPolyDocument( JsonNode node ) { + return new PolyDocument( nodeToPolyMap( node ) ); + } + + + private PolyMap nodeToPolyMap( JsonNode node ) { + Map map = new HashMap<>(); + node.fields().forEachRemaining( entry -> { + PolyString key = new PolyString( entry.getKey() ); + PolyValue value = nodeToPolyValue( entry.getValue() ); + map.put( key, value ); + } ); + return PolyMap.of( map ); + } + + + public PolyValue nodeToPolyValue( JsonNode node ) { + return switch ( node.getNodeType() ) { + case NULL -> new PolyNull(); + case ARRAY -> nodeToPolyList( node ); + case OBJECT -> nodeToPolyMap( node ); + case NUMBER -> nodeToPolyNumber( node ); + case STRING -> new PolyString( node.asText() ); + case BOOLEAN -> new PolyBoolean( node.asBoolean() ); + case BINARY, MISSING, POJO -> new PolyNull(); + }; + } + + + private PolyNumber nodeToPolyNumber( JsonNode node ) { + if ( node.isIntegralNumber() ) { + return new PolyLong( node.asLong() ); + } + return new PolyDouble( node.asDouble() ); + } + + + private PolyValue nodeToPolyList( JsonNode node ) { + return PolyList.of( StreamSupport.stream( node.spliterator(), false ) + .map( this::nodeToPolyValue ) + .toList() ); + } + +} diff --git a/plugins/json-adapter/src/main/resources/articles.json b/plugins/json-adapter/src/main/resources/articles.json new file mode 100644 index 0000000000..ae67f53fc8 --- /dev/null +++ b/plugins/json-adapter/src/main/resources/articles.json @@ -0,0 +1,66 @@ +[ + { + "id": 1, + "title": "Unlocking the Secrets of Deep Space", + "author": "Samantha Ray", + "published_date": "2024-04-24", + "is_featured": true, + "rating": 4.7 + }, + { + "id": 2, + "title": "Revolutionizing Agriculture with AI", + "author": "Marcus O'Neill", + "published_date": "2024-04-10", + "is_featured": false, + "rating": 4.2 + }, + { + "id": 3, + "title": "The Future of Urban Transportation", + "author": "Elena Gomez", + "published_date": "2024-03-29", + "is_featured": true, + "rating": 4.9 + }, + { + "id": 4, + "title": "Ancient Civilizations: Mysteries Unveiled", + "author": "Thomas Crane", + "published_date": "2024-01-15", + "is_featured": false, + "rating": 3.8 + }, + { + "id": 5, + "title": "The Psychology of Pandemic Fatigue", + "author": "Laura McPherson", + "published_date": "2024-02-27", + "is_featured": true, + "rating": 4.6 + }, + { + "id": 6, + "title": "Exploring the Depths of the Amazon Rainforest", + "author": "David Liu", + "published_date": "2024-03-13", + "is_featured": false, + "rating": 4.3 + }, + { + "id": 7, + "title": "New Horizons in Genetic Engineering", + "author": "Nadia Singh", + "published_date": "2024-04-02", + "is_featured": true, + "rating": 4.8 + }, + { + "id": 8, + "title": "Artificial Intelligence and Ethics", + "author": "James Lewis", + 
"published_date": "2024-04-18", + "is_featured": false, + "rating": 4.4 + } +] diff --git a/plugins/json-adapter/src/test/java/org/polypheny/db/test/JsonToPolyConverterTest.java b/plugins/json-adapter/src/test/java/org/polypheny/db/test/JsonToPolyConverterTest.java new file mode 100644 index 0000000000..c8798bb9d5 --- /dev/null +++ b/plugins/json-adapter/src/test/java/org/polypheny/db/test/JsonToPolyConverterTest.java @@ -0,0 +1,181 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.db.TestHelper; +import org.polypheny.db.adapter.json.JsonToPolyConverter; +import org.polypheny.db.type.entity.PolyList; +import org.polypheny.db.type.entity.PolyNull; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.document.PolyDocument; +import org.polypheny.db.type.entity.relational.PolyMap; + +public class JsonToPolyConverterTest { + + private static ObjectMapper mapper; + private static JsonToPolyConverter builder; + + + @BeforeAll + public static void setup() { + TestHelper testHelper = TestHelper.getInstance(); + mapper = new ObjectMapper(); + builder = new JsonToPolyConverter(); + + } + + + private JsonNode getNodesFromJson( String json ) throws JsonProcessingException { + return mapper.readTree( json ); + } + + + @Test + public void testString() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"name\": \"Maxine\"}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertEquals( "Maxine", map.get( new PolyString( "name" ) ).toString() ); + } + + + @Test + public void testLong() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"integer\": 492943}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertEquals( 492943, map.get( new PolyString( "integer" ) ).asLong().value ); + } + + + @Test + public void testDouble() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"double\": -650825.13}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertEquals( -650825.13, map.get( new PolyString( "double" ) ).asDouble().value ); + } + + + @Test + public void testBooleanTrue() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"boolean\": true}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + 
PolyMap map = value.asMap(); + assertEquals( true, map.get( new PolyString( "boolean" ) ).asBoolean().value ); + } + + + @Test + public void testBooleanFalse() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"boolean\": false}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertEquals( false, map.get( new PolyString( "boolean" ) ).asBoolean().value ); + } + + + @Test + public void testArray() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"integers\": [0, 1, 2, 3]}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertTrue( map.get( new PolyString( "integers" ) ).isList() ); + PolyList list = map.get( new PolyString( "integers" ) ).asList(); + assertEquals( 0, list.get( 0 ).asLong().value ); + assertEquals( 1, list.get( 1 ).asLong().value ); + assertEquals( 2, list.get( 2 ).asLong().value ); + assertEquals( 3, list.get( 3 ).asLong().value ); + } + + + @Test + public void testNull() throws JsonProcessingException { + JsonNode node = getNodesFromJson( "{\"null\": null}" ); + PolyValue value = builder.nodeToPolyValue( node ); + assertTrue( value.isMap() ); + PolyMap map = value.asMap(); + assertTrue( map.get( new PolyString( "null" ) ).isNull() ); + } + + + @Test + public void testDocument() throws IOException { + String json = "{" + + "\"string\": \"Hello, JSON!\"," + + "\"number\": 12345.678," + + "\"boolean\": true," + + "\"null\": null," + + "\"object\": {" + + " \"nestedString\": \"Inside JSON\"," + + " \"nestedNumber\": 9876" + + "}," + + "\"array\": [" + + " \"item1\"," + + " 234," + + " false," + + " null" + + "]" + + "}"; + + JsonNode node = getNodesFromJson( json ); + PolyValue value = builder.nodeToPolyDocument( node ); + assertTrue( value.isDocument() ); + PolyDocument doc = value.asDocument(); + assertEquals( "Hello, JSON!", doc.get( new PolyString( "string" ) ).asString().getValue() ); + assertEquals( 12345.678, doc.get( new PolyString( "number" ) ).asDouble().getValue() ); + assertEquals( true, doc.get( new PolyString( "boolean" ) ).asBoolean().getValue() ); + assertEquals( new PolyNull(), doc.get( new PolyString( "null" ) ).asNull() ); + + // check nested object + assertTrue( doc.get( new PolyString( "object" ) ).isMap() ); + PolyMap nestedObject = doc.get( new PolyString( "object" ) ).asMap(); + assertEquals( "Inside JSON", nestedObject.get( new PolyString( "nestedString" ) ).asString().getValue() ); + assertEquals( 9876, nestedObject.get( new PolyString( "nestedNumber" ) ).asLong().getValue() ); + + // check array + assertTrue( doc.get( new PolyString( "array" ) ).isList() ); + PolyList list = doc.get( new PolyString( "array" ) ).asList(); + assertTrue( list.get( 0 ).isString() ); + assertTrue( list.get( 1 ).isLong() ); + assertTrue( list.get( 2 ).isBoolean() ); + assertTrue( list.get( 3 ).isNull() ); + + assertEquals( "item1", list.get( 0 ).asString().getValue() ); + assertEquals( 234, list.get( 1 ).asLong().getValue() ); + assertEquals( false, list.get( 2 ).asBoolean().getValue() ); + assertEquals( new PolyNull(), list.get( 3 ).asNull() ); + } + + +} diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index cc69825079..749022e225 100644 --- 
a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.dbcp2.BasicDataSource; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingString; @@ -128,4 +129,10 @@ public List createTable( Context context, LogicalTableWrapper lo return List.of( physical ); } + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index f2d48a06d5..ddf811541d 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; @@ -142,6 +143,12 @@ protected boolean requiresSchema() { return false; } + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 8cb1b3c55b..6834741f05 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -23,6 +23,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; @@ -116,4 +117,9 @@ public List createTable( Context context, LogicalTableWrapper lo } + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } diff --git a/plugins/xml-adapter/build.gradle b/plugins/xml-adapter/build.gradle new file mode 100644 index 0000000000..33bac4864f --- /dev/null +++ b/plugins/xml-adapter/build.gradle @@ -0,0 +1,75 @@ +group "org.polypheny" + + +dependencies { + compileOnly project(":core") + compileOnly project(":dbms") + // https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-core + implementation group: 'com.fasterxml.jackson.core', name: 'jackson-core', version: jackson_core_version + implementation 'commons-codec:commons-codec:1.17.0' + implementation group: "commons-io", name: "commons-io", version: commons_io_version // Apache 2.0 + + + // --- Test Compile --- + testImplementation project(path: ":dbms", 
configuration: "test") + testImplementation project(path: ":dbms") + testImplementation project(path: ":core", configuration: "tests") + testImplementation project(path: ":core") + testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: junit_jupiter_version +} + + +sourceSets { + main { + java { + srcDirs = ["src/main/java"] + } + resources { + srcDirs = ["src/main/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/classes") + } + test { + java { + srcDirs = ["src/test/java"] + destinationDirectory.set(file(project.buildDir.absolutePath + "/test-classes")) + } + resources { + srcDirs = ["src/test/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes") + } +} + +compileJava { + dependsOn(":dbms:processResources") + dependsOn(":config:processResources") + dependsOn(":core:processResources") + dependsOn(":information:processResources") +} + +delombok { + dependsOn(":dbms:processResources") +} + +test.dependsOn(":dbms:shadowJar") + +/** + * JARs + */ +jar { + manifest { + attributes "Manifest-Version": "1.0" + attributes "Copyright": "The Polypheny Project (polypheny.org)" + attributes "Version": "$project.version" + } +} +java { + withJavadocJar() + withSourcesJar() +} + +licensee { + allow('MIT') + allow('Apache-2.0') +} diff --git a/plugins/xml-adapter/gradle.properties b/plugins/xml-adapter/gradle.properties new file mode 100644 index 0000000000..12f86d1179 --- /dev/null +++ b/plugins/xml-adapter/gradle.properties @@ -0,0 +1,27 @@ +# +# Copyright 2019-2023 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +pluginVersion = 0.0.1 + +pluginId = xml-adapter +pluginClass = org.polypheny.db.adapter.xml.XmlPlugin +pluginDependencies = +pluginProvider = The Polypheny Project +pluginUrlPath = +pluginCategories = source +pluginPolyDependencies = +pluginIsSystemComponent = false +pluginIsUiVisible = true diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlCollection.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlCollection.java new file mode 100644 index 0000000000..75d328bb79 --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlCollection.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.xml; + +import java.net.URL; +import org.apache.calcite.linq4j.AbstractEnumerable; +import org.apache.calcite.linq4j.Enumerable; +import org.apache.calcite.linq4j.Enumerator; +import org.apache.calcite.linq4j.tree.Expression; +import org.apache.calcite.linq4j.tree.Expressions; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.DataContext; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.catalogs.DocAdapterCatalog; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.schema.types.ScannableEntity; +import org.polypheny.db.schema.types.TranslatableEntity; +import org.polypheny.db.type.entity.PolyValue; + +final class XmlCollection extends PhysicalCollection implements ScannableEntity, TranslatableEntity { + + private final URL url; + private final Adapter adapter; + + + XmlCollection( URL url, PhysicalEntity collection, long allocationId, XmlNamespace namespace, Adapter adapter ) { + super( collection.getId(), allocationId, collection.getLogicalId(), namespace.getId(), collection.getName(), namespace.getName(), adapter.getAdapterId() ); + this.url = url; + this.adapter = adapter; + } + + + @Override + public Expression asExpression() { + Expression argExp = Expressions.constant( this.id ); + return Expressions.convert_( Expressions.call( Expressions.call( this.adapter.asExpression(), "getAdapterCatalog" ), "getPhysical", argExp ), XmlCollection.class ); + } + + + @Override + public Enumerable scan( DataContext dataContext ) { + dataContext.getStatement().getTransaction().registerInvolvedAdapter( adapter ); + return new AbstractEnumerable<>() { + @Override + public Enumerator enumerator() { + return new XmlEnumerator( url ); + } + }; + } + + + public AlgNode toAlg( AlgCluster cluster, AlgTraitSet traitSet ) { + return new XmlScan( cluster, this ); + } + + + public Enumerable project( final DataContext dataContext ) { + dataContext.getStatement().getTransaction().registerInvolvedAdapter( adapter ); + return new AbstractEnumerable<>() { + @Override + public Enumerator enumerator() { + return new XmlEnumerator( url ); + } + }; + } + +} diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlEnumerator.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlEnumerator.java new file mode 100644 index 0000000000..042fc94a48 --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlEnumerator.java @@ -0,0 +1,106 @@ +package org.polypheny.db.adapter.xml; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamConstants; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import org.apache.calcite.linq4j.Enumerator; +import org.apache.commons.codec.DecoderException; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.type.entity.PolyValue; + +final class XmlEnumerator implements Enumerator { + + private final static XmlToPolyConverter CONVERTER = new XmlToPolyConverter(); + + private final URL url; + private XMLStreamReader reader; + private String rootElementName; + private PolyValue[] current; + + + XmlEnumerator( URL url ) { + this.url = url; + initializeReader(); + } + + + 
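/**
+     * Opens the stream and advances the cursor to the first document element.
+     * The first START_ELEMENT is treated as the root of the document list; its
+     * name is remembered so that moveNext() can recognise the list's closing tag.
+     */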
private void initializeReader() {
+        try {
+            XMLInputFactory factory = XMLInputFactory.newInstance();
+            InputStream inputStream = url.openStream();
+            reader = factory.createXMLStreamReader( inputStream );
+
+            // advance to the root element
+            while ( reader.hasNext() && reader.next() != XMLStreamConstants.START_ELEMENT )
+                ;
+            if ( reader.getEventType() != XMLStreamConstants.START_ELEMENT ) {
+                throw new GenericRuntimeException( "Unexpected end of stream: the document contains no root element" );
+            }
+            rootElementName = reader.getLocalName();
+            // advance to the first document element below the root
+            do {
+                if ( !reader.hasNext() ) {
+                    return;
+                }
+                reader.next();
+            } while ( reader.getEventType() != XMLStreamConstants.START_ELEMENT );
+        } catch ( XMLStreamException | IOException e ) {
+            throw new GenericRuntimeException( "Error initializing XML reader: " + e.getMessage(), e );
+        }
+    }
+
+
+    @Override
+    public PolyValue[] current() {
+        return current;
+    }
+
+
+    @Override
+    public boolean moveNext() {
+        try {
+            // the cursor rests on a START_ELEMENT only if another document is available
+            if ( reader == null || reader.getEventType() != XMLStreamConstants.START_ELEMENT ) {
+                return false;
+            }
+            String documentOuterName = reader.getLocalName();
+            reader.next();
+            current = new PolyValue[]{ CONVERTER.toPolyDocument( reader, documentOuterName ) };
+            // position the cursor on the next document element (if any) for the following call
+            while ( reader.hasNext() && reader.next() != XMLStreamConstants.START_ELEMENT ) {
+                if ( reader.getEventType() == XMLStreamConstants.END_ELEMENT && rootElementName.equals( reader.getLocalName() ) ) {
+                    // closing tag of the root element: no documents left
+                    break;
+                }
+            }
+            return true;
+        } catch ( XMLStreamException | DecoderException e ) {
+            throw new GenericRuntimeException( "Failed to get next document.", e );
+        }
+    }
+
+
+    @Override
+    public void reset() {
+        close();
+        initializeReader();
+    }
+
+
+    @Override
+    public void close() {
+        if ( reader != null ) {
+            try {
+                reader.close();
+            } catch ( XMLStreamException e ) {
+                throw new GenericRuntimeException( "Error closing XML reader: " + e.getMessage(), e );
+            }
+        }
+        reader = null;
+        current = null;
+    }
+
+}
diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlMetaRetriever.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlMetaRetriever.java
new file mode 100644
index 0000000000..79ae363720
--- /dev/null
+++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlMetaRetriever.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.xml;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.file.NoSuchFileException;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument;
+import org.polypheny.db.catalog.logistic.EntityType;
+import org.polypheny.db.util.Source;
+import org.polypheny.db.util.Sources;
+
+final class XmlMetaRetriever {
+
+    static List<ExportedDocument> getDocuments( URL xmlFiles ) throws IOException {
+        List<ExportedDocument> exportedDocuments = new LinkedList<>();
+        Set<String> fileNames = getFileNames( xmlFiles );
+
+        for ( String fileName : fileNames ) {
+            URL xmlFile = new URL( xmlFiles, fileName );
+            String entityName = deriveEntityName( xmlFile.getFile() );
+            // openStream() throws if the file is unreachable; reaching the body means it can be read
+            try ( InputStream ignored = xmlFile.openStream() ) {
+                exportedDocuments.add( new ExportedDocument( entityName, false, EntityType.SOURCE ) );
+            }
+        }
+        return exportedDocuments;
+    }
+
+
+    static URL findDocumentUrl( URL xmlFiles, String name ) throws MalformedURLException, NoSuchFileException {
+        String[] extensions = { ".xml", ".xml.gz" };
+        String path = xmlFiles.getPath();
+
+        // handle single file
+        for ( String ext : extensions ) {
+            if ( path.endsWith( name + ext ) ) {
+                return xmlFiles;
+            }
+        }
+
+        // handle directory
+        Set<String> fileNames = getFileNames( xmlFiles );
+        for ( String file : fileNames ) {
+            for ( String ext : extensions ) {
+                if ( file.equals( name + ext ) ) {
+                    return new URL( xmlFiles, file );
+                }
+            }
+        }
+
+        throw new NoSuchFileException( "No XML file(s) found under the URL '" + xmlFiles + "'" );
+    }
+
+
+    private static Set<String> getFileNames( URL xmlFiles ) throws NoSuchFileException {
+        Source source = Sources.of( xmlFiles );
+        if ( source.isFile() ) {
+            File file = source.file();
+            if ( file.isFile() ) {
+                // url is a single file
+                return Set.of( file.getName() );
+            }
+
+            // url is a directory; list both plain and gzipped XML files, matching the extensions accepted by findDocumentUrl
+            File[] files = file.listFiles( ( d, name ) -> name.endsWith( ".xml" ) || name.endsWith( ".xml.gz" ) );
+            if ( files == null || files.length == 0 ) {
+                throw new NoSuchFileException( "No .xml or .xml.gz files were found." );
+            }
+            return Arrays.stream( files )
+                    .map( File::getName )
+                    .collect( Collectors.toSet() );
+        }
+        // url is a web source
+        String filePath = xmlFiles.getPath();
+        return Set.of( filePath.substring( filePath.lastIndexOf( '/' ) + 1 ) );
+    }
+
+
+    private static String deriveEntityName( String fileName ) {
+        fileName = fileName.replaceAll( "/+$", "" ); // remove trailing "/"
+        return fileName
+                .substring( fileName.lastIndexOf( '/' ) + 1 ) // extract file name after last "/"
+                .toLowerCase()
+                .replace( ".xml.gz", "" )
+                .replace( ".xml", "" )
+                .replaceAll( "[^a-z0-9_]+", "" ) // remove invalid characters
+                .trim();
+    }
+
+}
diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlNamespace.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlNamespace.java
new file mode 100644
index 0000000000..1da110cbe6
--- /dev/null
+++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlNamespace.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.xml; + +import java.util.Optional; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.plan.Convention; +import org.polypheny.db.schema.Namespace; + +@Getter +final class XmlNamespace extends Namespace { + + private final String name; + + + XmlNamespace( String name, long id, long adapterId ) { + super( id, adapterId ); + this.name = name; + } + + + @Override + protected @Nullable Convention getConvention() { + return null; + } + + + @Override + public @NotNull Optional unwrap( Class aClass ) { + return super.unwrap( aClass ); + } + + + @Override + public @NotNull C unwrapOrThrow( Class aClass ) { + return super.unwrapOrThrow( aClass ); + } + +} diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlPlugin.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlPlugin.java new file mode 100644 index 0000000000..eb5f4766e0 --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlPlugin.java @@ -0,0 +1,49 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.xml; + +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; + +public class XmlPlugin extends PolyPlugin { + + private long id; + + + /** + * Constructor to be used by plugin manager for plugin instantiation. + * Your plugins have to provide constructor with this exact signature to be successfully loaded by manager. + */ + + public XmlPlugin( PluginContext context ) { + super( context ); + } + + + @Override + public void afterCatalogInit() { + this.id = AdapterManager.addAdapterTemplate( XmlSource.class, "XML", XmlSource::new ); + } + + + @Override + public void stop() { + AdapterManager.removeAdapterTemplate( id ); + } + +} diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlProjectScanRule.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlProjectScanRule.java new file mode 100644 index 0000000000..63dacf3c46 --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlProjectScanRule.java @@ -0,0 +1,45 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.xml; + +import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptRuleCall; +import org.polypheny.db.tools.AlgBuilderFactory; + +final class XmlProjectScanRule extends AlgOptRule { + + static final XmlProjectScanRule INSTANCE = new XmlProjectScanRule( AlgFactories.LOGICAL_BUILDER ); + + + private XmlProjectScanRule( AlgBuilderFactory algBuilderFactory ) { + super( + operand( LogicalDocumentScan.class, none() ), + algBuilderFactory, + "XmlProjectScanRule" + ); + } + + + @Override + public void onMatch( AlgOptRuleCall call ) { + final LogicalDocumentScan scan = call.alg( 0 ); + call.transformTo( new XmlScan( scan.getCluster(), scan.getEntity().unwrapOrThrow( XmlCollection.class ) ) ); + } + +} diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlScan.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlScan.java new file mode 100644 index 0000000000..ab44511b2c --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlScan.java @@ -0,0 +1,92 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.xml; + +import java.util.List; +import lombok.Getter; +import org.apache.calcite.linq4j.tree.Blocks; +import org.apache.calcite.linq4j.tree.Expressions; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgWriter; +import org.polypheny.db.algebra.core.document.DocumentScan; +import org.polypheny.db.algebra.enumerable.EnumerableAlg; +import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; +import org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.enumerable.PhysType; +import org.polypheny.db.algebra.enumerable.PhysTypeImpl; +import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgPlanner; +import org.polypheny.db.plan.AlgTraitSet; + +public final class XmlScan extends DocumentScan implements EnumerableAlg { + + @Getter + private final XmlCollection collection; + + + XmlScan( AlgCluster cluster, @NotNull XmlCollection collection ) { + super( cluster, cluster.traitSetOf( EnumerableConvention.INSTANCE ), collection ); + this.collection = collection; + } + + + @Override + public AlgNode copy( AlgTraitSet traitSet, List inputs ) { + assert inputs.isEmpty(); + return new XmlScan( getCluster(), collection ); + } + + + @Override + public AlgWriter explainTerms( AlgWriter pw ) { + return super.explainTerms( pw ); + } + + + @Override + public AlgDataType deriveRowType() { + final List fieldList = entity.getTupleType().getFields(); + return getCluster().getTypeFactory().builder().add( fieldList.get( 0 ) ).build(); + } + + + @Override + public void register( @NotNull AlgPlanner planner ) { + planner.addRule( XmlProjectScanRule.INSTANCE ); + } + + + @Override + public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { + // copied over from the csv project scan rule + return super.computeSelfCost( planner, mq ).multiplyBy( ((double) 1 + 2D) / ((double) entity.getTupleType().getFieldCount() + 2D) ); + } + + + @Override + public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { + PhysType physType = PhysTypeImpl.of( implementor.getTypeFactory(), getTupleType(), pref.preferArray() ); + + return implementor.result( physType, Blocks.toBlock( Expressions.call( entity.asExpression( XmlCollection.class ), "project", implementor.getRootExpression() ) ) ); + } + +} diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlSource.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlSource.java new file mode 100644 index 0000000000..c996041d9b --- /dev/null +++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlSource.java @@ -0,0 +1,290 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.xml; + +import java.io.File; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.NoSuchFileException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.experimental.Delegate; +import org.pf4j.Extension; +import org.polypheny.db.adapter.ConnectionMethod; +import org.polypheny.db.adapter.DataSource; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DocumentDataSource; +import org.polypheny.db.adapter.DocumentScanDelegate; +import org.polypheny.db.adapter.Scannable; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingList; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.catalogs.AdapterCatalog; +import org.polypheny.db.catalog.catalogs.DocAdapterCatalog; +import org.polypheny.db.catalog.entity.allocation.AllocationCollection; +import org.polypheny.db.catalog.entity.allocation.AllocationGraph; +import org.polypheny.db.catalog.entity.allocation.AllocationTable; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalCollection; +import org.polypheny.db.catalog.entity.logical.LogicalGraph; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.schema.Namespace; +import org.polypheny.db.transaction.PolyXid; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Extension +@AdapterProperties( + name = "XML", + description = "An adapter for querying XML files. An XML file or a directory containing multiple XML files can be specified by path. 
Currently, this adapter only supports read operations.",
+        usedModes = DeployMode.EMBEDDED,
+        defaultMode = DeployMode.EMBEDDED)
+@AdapterSettingList(name = "method", options = { "link", "url" }, defaultValue = "link", description = "Whether the XML file(s) are linked from the local filesystem (sufficient permissions are required) or fetched from a URL.", position = 1)
+//@AdapterSettingDirectory(subOf = "method_upload", name = "directory", defaultValue = "classpath://products.xml", description = "Path to the XML file(s) to be integrated as this source.", position = 2)
+@AdapterSettingString(subOf = "method_link", defaultValue = "classpath://products.xml", name = "directoryName", description = "Path to the XML file(s) to be integrated as this source.", position = 2)
+@AdapterSettingString(subOf = "method_url", defaultValue = "http://localhost/cars.xml", name = "url", description = "URL to the XML file(s) to be integrated as this source.", position = 2)
+public class XmlSource extends DataSource<DocAdapterCatalog> implements DocumentDataSource, Scannable {
+
+    private static final Logger log = LoggerFactory.getLogger( XmlSource.class );
+    @Delegate(excludes = Excludes.class)
+    private final DocumentScanDelegate delegate;
+    private XmlNamespace namespace;
+    private final ConnectionMethod connectionMethod;
+    private URL xmlFiles;
+
+
+    public XmlSource( final long storeId, final String uniqueName, final Map<String, String> settings, DeployMode mode ) {
+        super( storeId, uniqueName, settings, mode, true, new DocAdapterCatalog( storeId ), Set.of( DataModel.DOCUMENT ) );
+        // ConnectionMethod.from already upper-cases its argument, so no additional normalization is needed here.
+        this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD;
+        this.xmlFiles = getXmlFilesUrl( settings );
+        this.delegate = new DocumentScanDelegate( this, getAdapterCatalog() );
+        long namespaceId = Catalog.getInstance().createNamespace( uniqueName, DataModel.DOCUMENT, true );
+        this.namespace = new XmlNamespace( uniqueName, namespaceId, getAdapterId() );
+    }
+
+
+    @Override
+    protected void reloadSettings( List<String> updatedSettings ) {
+        // The source location can arrive as "directory" (upload), "directoryName" (link), or "url" (url).
+        if ( updatedSettings.contains( "directory" ) || updatedSettings.contains( "directoryName" ) || updatedSettings.contains( "url" ) ) {
+            this.xmlFiles = getXmlFilesUrl( settings );
+        }
+    }
+
+
+    private URL getXmlFilesUrl( final Map<String, String> settings ) {
+        return switch ( connectionMethod ) {
+            case LINK -> {
+                String files = settings.get( "directoryName" );
+                if ( files.startsWith( "classpath://" ) ) {
+                    yield this.getClass().getClassLoader().getResource( files.replace( "classpath://", "" ) );
+                }
+                try {
+                    yield new File( files ).toURI().toURL();
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+            case UPLOAD -> {
+                String files = settings.get( "directory" );
+                if ( files.startsWith( "classpath://" ) ) {
+                    yield this.getClass().getClassLoader().getResource( files.replace( "classpath://", "" ) + "/" );
+                }
+                try {
+                    yield new File( files ).toURI().toURL();
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+            case URL -> {
+                String files = settings.get( "url" );
+                try {
+                    yield new URL( files );
+                } catch ( MalformedURLException e ) {
+                    throw new GenericRuntimeException( e );
+                }
+            }
+        };
+    }
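+
+
+    // Illustrative deployment (values are examples, not defaults): the Web UI turns the
+    // adapter form into a statement of the shape
+    //   ALTER ADAPTERS ADD "xmlsource" USING 'XML' AS 'SOURCE'
+    //   WITH '{"method":"link","directoryName":"/data/products.xml","mode":"EMBEDDED"}'
+    // (see the changes to Crud.createAdapter at the end of this diff); the settings JSON
+    // becomes the map consumed by getXmlFilesUrl above.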
+
+
+    @Override
+    public void updateNamespace( String name, long id ) {
+        namespace = new XmlNamespace( name, id, adapterId );
+    }
+
+
+    @Override
+    public Namespace getCurrentNamespace() {
+        return namespace;
+    }
+
+
+    @Override
+    public void shutdown() {
+        removeInformationPage();
+    }
+
+
+    @Override
+    public List<ExportedDocument> getExportedCollection() {
+        try {
+            return XmlMetaRetriever.getDocuments( xmlFiles );
+        } catch ( IOException e ) {
+            // include the cause so deployment failures remain diagnosable
+            throw new GenericRuntimeException( "Failed to retrieve documents from the XML source: " + e.getMessage() );
+        }
+    }
+
+
+    @Override
+    public AdapterCatalog getCatalog() {
+        return adapterCatalog;
+    }
+
+
+    @Override
+    public void restoreCollection( AllocationCollection allocation, List<PhysicalEntity> entities, Context context ) {
+        PhysicalEntity collection = entities.get( 0 ); // the first entity is the physical collection backing this allocation
+        updateNamespace( collection.getNamespaceName(), collection.getNamespaceId() );
+        try {
+            PhysicalCollection physicalCollection = new XmlCollection( XmlMetaRetriever.findDocumentUrl( xmlFiles, collection.getName() ), collection, allocation.getId(), namespace, this );
+            adapterCatalog.addPhysical( allocation, physicalCollection );
+        } catch ( MalformedURLException | NoSuchFileException e ) {
+            throw new GenericRuntimeException( e );
+        }
+    }
+
+
+    @Override
+    public List<PhysicalEntity> createCollection( Context context, LogicalCollection logical, AllocationCollection allocation ) {
+        PhysicalCollection collection = adapterCatalog.createCollection(
+                logical.getNamespaceName(),
+                logical.getName(),
+                logical,
+                allocation
+        );
+        try {
+            PhysicalCollection physicalCollection = new XmlCollection( XmlMetaRetriever.findDocumentUrl( xmlFiles, collection.getName() ), collection, allocation.getId(), namespace, this );
+            adapterCatalog.replacePhysical( physicalCollection );
+            return List.of( physicalCollection );
+        } catch ( MalformedURLException | NoSuchFileException e ) {
+            throw new GenericRuntimeException( e );
+        }
+    }
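+
+
+    // The callbacks below are required by the Scannable contract. Since this source is
+    // registered as read-only (the constructor passes dataReadOnly = true to the super
+    // constructor), the mutating operations are deliberate no-ops that only log at debug level.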
+
+
+    @Override
+    public void dropCollection( Context context, AllocationCollection allocation ) {
+        // Read-only source: the backing XML files are never modified, so there is nothing to drop here.
+    }
+
+
+    @Override
+    public void renameLogicalColumn( long id, String newColumnName ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method renameLogicalColumn()." );
+    }
+
+
+    @Override
+    public void truncate( Context context, long allocId ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method truncate()." );
+    }
+
+
+    @Override
+    public boolean prepare( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method prepare()." );
+        return true;
+    }
+
+
+    @Override
+    public void commit( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method commit()." );
+    }
+
+
+    @Override
+    public void rollback( PolyXid xid ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method rollback()." );
+    }
+
+
+    @Override
+    public void dropTable( Context context, long allocId ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method dropTable()." );
+    }
+
+
+    @Override
+    public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method createTable()." );
+        return null;
+    }
+
+
+    @Override
+    public void restoreTable( AllocationTable alloc, List<PhysicalEntity> entities, Context context ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method restoreTable()." );
+    }
+
+
+    @Override
+    public List<PhysicalEntity> createGraph( Context context, LogicalGraph logical, AllocationGraph allocation ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method createGraph()." );
+        return null;
+    }
+
+
+    @Override
+    public void dropGraph( Context context, AllocationGraph allocation ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method dropGraph()." );
+    }
+
+
+    @Override
+    public void restoreGraph( AllocationGraph alloc, List<PhysicalEntity> entities, Context context ) {
+        log.debug( "NOT SUPPORTED: XML source does not support method restoreGraph()." );
+    }
+
+
+    @Override
+    public DocumentDataSource asDocumentDataSource() {
+        return this;
+    }
+
+
+    private interface Excludes {
+
+        void refreshCollection( long allocId );
+
+        void createCollection( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation );
+
+        void restoreCollection( AllocationTable alloc, List<PhysicalEntity> entities );
+
+    }
+
+}
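The converter introduced by the next file turns a StAX event stream into Polypheny's document model. A minimal sketch of how it is driven, mirroring the unit tests further below (the helper name parseOne and the XML literal are illustrative, not part of the adapter):

    import java.io.StringReader;
    import javax.xml.stream.XMLInputFactory;
    import javax.xml.stream.XMLStreamException;
    import javax.xml.stream.XMLStreamReader;
    import org.apache.commons.codec.DecoderException;
    import org.polypheny.db.adapter.xml.XmlToPolyConverter;
    import org.polypheny.db.type.entity.document.PolyDocument;

    static PolyDocument parseOne( String xml ) throws XMLStreamException, DecoderException {
        XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) );
        reader.next(); // advance from START_DOCUMENT to the first element
        // parsing stops at an end tag named "root", or at the end of the stream
        return new XmlToPolyConverter().toPolyDocument( reader, "root" );
    }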
diff --git a/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlToPolyConverter.java b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlToPolyConverter.java
new file mode 100644
index 0000000000..e7354b9ef6
--- /dev/null
+++ b/plugins/xml-adapter/src/main/java/org/polypheny/db/adapter/xml/XmlToPolyConverter.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2019-2024 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.xml;
+
+import java.math.BigDecimal;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.stream.XMLStreamConstants;
+import javax.xml.stream.XMLStreamException;
+import javax.xml.stream.XMLStreamReader;
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.binary.Hex;
+import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
+import org.polypheny.db.type.entity.PolyBinary;
+import org.polypheny.db.type.entity.PolyBoolean;
+import org.polypheny.db.type.entity.PolyList;
+import org.polypheny.db.type.entity.PolyNull;
+import org.polypheny.db.type.entity.PolyString;
+import org.polypheny.db.type.entity.PolyValue;
+import org.polypheny.db.type.entity.document.PolyDocument;
+import org.polypheny.db.type.entity.numerical.PolyBigDecimal;
+import org.polypheny.db.type.entity.numerical.PolyDouble;
+import org.polypheny.db.type.entity.numerical.PolyFloat;
+import org.polypheny.db.type.entity.numerical.PolyLong;
+import org.polypheny.db.type.entity.relational.PolyMap;
+import org.polypheny.db.type.entity.temporal.PolyDate;
+import org.polypheny.db.type.entity.temporal.PolyTime;
+import org.polypheny.db.type.entity.temporal.PolyTimestamp;
+
+public final class XmlToPolyConverter {
+
+    public PolyDocument toPolyDocument( XMLStreamReader reader, String elementOuterName ) throws XMLStreamException, DecoderException {
+        return new PolyDocument( toPolyMap( reader, elementOuterName ) );
+    }
+
+
+    private PolyMap<PolyString, PolyValue> toPolyMap( XMLStreamReader reader, String elementOuterName ) throws XMLStreamException, DecoderException {
+        Map<PolyString, PolyValue> map = new HashMap<>();
+        int event;
+        while ( reader.hasNext() ) {
+            if ( reader.getEventType() != XMLStreamConstants.START_ELEMENT ) {
+                event = reader.next();
+            } else {
+                event = reader.getEventType();
+            }
+            if ( event == XMLStreamConstants.START_ELEMENT ) {
+                PolyString key = new PolyString( reader.getLocalName() );
+                PolyValue value = toPolyValue( reader );
+                map.put( key, value );
+                continue;
+            }
+            if ( event == XMLStreamConstants.END_ELEMENT && reader.getLocalName().equals( elementOuterName ) ) {
+                break;
+            }
+        }
+        return PolyMap.of( map );
+    }
+
+
+    private PolyValue toPolyValue( XMLStreamReader reader ) throws XMLStreamException, DecoderException {
+        String currentElementName = reader.getLocalName();
+        String typeName = reader.getAttributeValue( null, "type" );
+        if ( typeName == null ) {
+            typeName = "string";
+        }
+
+        if ( !reader.hasNext() ) {
+            throw new XMLStreamException( "Unexpected end of stream." );
+        }
+        reader.next();
+        // the reader is now expected to be on character data (possibly whitespace) or on the element's end tag
+        if ( reader.getEventType() == XMLStreamConstants.END_ELEMENT ) {
+            return new PolyNull();
+        }
+        String value = reader.getText().trim();
+        if ( value.isEmpty() ) {
+            if ( typeName.equals( "list" ) ) {
+                reader.next(); // skip the whitespace between the list element and its first child
+                return toPolyList( reader, currentElementName );
+            }
+            if ( typeName.equals( "string" ) ) { // nested documents carry no type attribute and therefore default to "string"
+                reader.next(); // skip the whitespace between the enclosing element and its first child
+                return toPolyMap( reader, currentElementName );
+            }
+        }
+
+        return switch ( typeName ) {
+            case "boolean" -> new PolyBoolean( Boolean.parseBoolean( value ) );
+            case "integer" -> new PolyLong( Long.parseLong( value ) );
+            case "decimal" -> new PolyBigDecimal( new BigDecimal( value ) );
+            case "float" -> new PolyFloat( Float.parseFloat( value ) );
+            case "double" -> new PolyDouble( Double.parseDouble( value ) );
+            case "date" -> {
+                LocalDate date = LocalDate.parse( value, DateTimeFormatter.ISO_DATE );
+                yield new PolyDate( date.atStartOfDay().toInstant( ZoneOffset.UTC ).toEpochMilli() );
+            }
+            case "time" -> {
+                LocalTime time = LocalTime.parse( value, DateTimeFormatter.ISO_TIME );
+                yield new PolyTime( (int) (time.toNanoOfDay() / 1_000_000) );
+            }
+            case "dateTime" -> {
+                LocalDateTime dateTime = LocalDateTime.parse( value, DateTimeFormatter.ISO_DATE_TIME );
+                yield new PolyTimestamp( dateTime.toInstant( ZoneOffset.UTC ).toEpochMilli() );
+            }
+            case "base64Binary" -> {
+                byte[] binaryData = Base64.getDecoder().decode( value );
+                yield new PolyBinary( binaryData, binaryData.length );
+            }
+            case "hexBinary" -> {
+                byte[] hexBinaryData = Hex.decodeHex( value.toCharArray() );
+                yield new PolyBinary( hexBinaryData, hexBinaryData.length );
+            }
+            case "string" -> new PolyString( value );
+            default -> throw new GenericRuntimeException( "Illegal type encountered: " + typeName );
+        };
+    }
+
+
+    private PolyValue toPolyList( XMLStreamReader reader, String listOuterName ) throws XMLStreamException, DecoderException {
+        List<PolyValue> values = new ArrayList<>();
+        while ( !reader.getLocalName().equals( listOuterName ) ) {
+            values.add( toPolyValue( reader ) );
+            if ( !reader.hasNext() ) {
+                throw new XMLStreamException( "Unexpected end of stream." );
+            }
+            reader.next(); // consume the end tag of the element just converted
+            reader.next(); // skip the whitespace between sibling elements
+            reader.next(); // position the reader on the next child or on the list's closing tag
+        }
+        return new PolyList<>( values );
+    }
+
+}
diff --git a/plugins/xml-adapter/src/main/resources/products.xml b/plugins/xml-adapter/src/main/resources/products.xml
new file mode 100644
index 0000000000..b7ca7263eb
--- /dev/null
+++ b/plugins/xml-adapter/src/main/resources/products.xml
@@ -0,0 +1,140 @@
+
+
+
+ 001
+ Eco-Friendly Water Bottle
+ Home and Kitchen
+ A reusable, BPA-free water bottle made from sustainable materials.
+ 19.99
+ 150
+
+ Reusable
+ BPA-free
+ Sustainable materials
+
+
+ Fancy Ltd.
+ 22 + + + + 002 + Wireless Noise-Canceling Headphones + Electronics + High-quality headphones with active noise cancellation and long battery life. + 129.99 + 75 + + Active noise cancellation + Long battery life + Wireless + + + SoundTech Inc. + 45 + + + + 003 + Smart LED Bulb + Smart Home + Color-changing smart LED bulb controllable via mobile app or voice assistant. + 24.99 + 200 + + Color-changing + Controllable via app + Voice assistant compatible + + + BrightHome + 67 + + + + 004 + Portable Bluetooth Speaker + Audio + Compact, waterproof Bluetooth speaker with rich sound and deep bass. + 59.99 + 120 + + Compact + Waterproof + Rich sound + + + SoundWave Co. + 89 + + + + 005 + Stainless Steel Cookware Set + Home and Kitchen + Durable and stylish stainless steel cookware set with non-stick coating. + 249.99 + 50 + + Durable + Stylish + Non-stick coating + + + CookMaster + 33 + + + + 006 + 4K Ultra HD Smart TV + Electronics + 55-inch 4K Ultra HD Smart TV with built-in streaming services and HDR support. + 499.99 + 30 + + 55-inch + 4K Ultra HD + HDR support + Built-in streaming services + + + VisionTech + 57 + + + + 007 + Ergonomic Office Chair + Office + Comfortable office chair with lumbar support and adjustable height settings. + 179.99 + 40 + + Lumbar support + Adjustable height + Comfortable + + + ComfortSeating + 72 + + + + 008 + Fitness Tracker + Health and Wellness + Wearable fitness tracker with heart rate monitor, GPS, and sleep tracking. + 99.99 + 100 + + Heart rate monitor + GPS + Sleep tracking + + + FitLife + 88 + + + diff --git a/plugins/xml-adapter/src/test/java/org/polypheny/db/test/XmlToPolyConverterTest.java b/plugins/xml-adapter/src/test/java/org/polypheny/db/test/XmlToPolyConverterTest.java new file mode 100644 index 0000000000..16996895b7 --- /dev/null +++ b/plugins/xml-adapter/src/test/java/org/polypheny/db/test/XmlToPolyConverterTest.java @@ -0,0 +1,200 @@ +/* + * Copyright 2019-2024 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.io.StringReader; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; +import org.apache.commons.codec.DecoderException; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.db.TestHelper; +import org.polypheny.db.adapter.xml.XmlToPolyConverter; +import org.polypheny.db.type.entity.PolyList; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.document.PolyDocument; + +public class XmlToPolyConverterTest { + + private static XmlToPolyConverter converter; + + + @BeforeAll + public static void setup() { + converter = new XmlToPolyConverter(); + TestHelper testHelper = TestHelper.getInstance(); + } + + + @Test + public void testString() throws XMLStreamException, DecoderException { + String xml = "Maxine"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( "Maxine", polyDoc.get( new PolyString( "name" ) ).asString().getValue() ); + } + + + @Test + public void testLong() throws XMLStreamException, DecoderException { + String xml = "492943"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( 492943, polyDoc.get( new PolyString( "integer" ) ).asLong().getValue() ); + } + + + @Test + public void testDouble() throws XMLStreamException, DecoderException { + String xml = "-650825.13"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( -650825.13, polyDoc.get( new PolyString( "double" ) ).asDouble().getValue() ); + } + + + @Test + public void testBooleanTrue() throws XMLStreamException, DecoderException { + String xml = "true"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( true, polyDoc.get( new PolyString( "boolean" ) ).asBoolean().getValue() ); + } + + + @Test + public void testBooleanFalse() throws XMLStreamException, DecoderException { + String xml = "false"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( false, polyDoc.get( new PolyString( "boolean" ) ).asBoolean().getValue() ); + } + + + @Test + public void testArray() throws XMLStreamException, DecoderException { + String xml = "\n" + + " 0\n" + + " 1\n" + + " 2\n" + + " 3\n" + + 
"\n"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertTrue( polyDoc.get( new PolyString( "integers" ) ).isList() ); + PolyList list = polyDoc.get( new PolyString( "integers" ) ).asList(); + assertEquals( "0", list.get( 0 ).asString().getValue() ); + assertEquals( "1", list.get( 1 ).asString().getValue() ); + assertEquals( "2", list.get( 2 ).asString().getValue() ); + assertEquals( "3", list.get( 3 ).asString().getValue() ); + } + + + @Test + public void testNull() throws XMLStreamException, DecoderException { + String xml = ""; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertTrue( polyDoc.get( new PolyString( "null" ) ).isNull() ); + } + + + @Test + public void testDate() throws XMLStreamException, DecoderException { + String xml = "2024-08-07"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( 1722988800000L, polyDoc.get( new PolyString( "date" ) ).asDate().millisSinceEpoch ); + } + + + @Test + public void testTime() throws XMLStreamException, DecoderException { + String xml = ""; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( 49530000, polyDoc.get( new PolyString( "time" ) ).asTime().ofDay ); + } + + + @Test + public void testDateTime() throws XMLStreamException, DecoderException { + String xml = "2024-08-07T13:45:30"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( 1723038330000L, polyDoc.get( new PolyString( "dateTime" ) ).asTimestamp().millisSinceEpoch ); + } + + + @Test + public void testBase64Binary() throws XMLStreamException, DecoderException { + String xml = "SGVsbG8sIFdvcmxkIQ=="; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( "Hello, World!", new String( polyDoc.get( new PolyString( "binary" ) ).asBinary().getValue() ) ); + } + + + @Test + public void testHexBinary() throws XMLStreamException, DecoderException { + String xml = "48656c6c6f2c20576f726c6421"; + XMLStreamReader reader = XMLInputFactory.newDefaultFactory().createXMLStreamReader( new StringReader( xml ) ); + reader.next(); + PolyValue value = converter.toPolyDocument( reader, "root" ); + assertTrue( value.isDocument() ); + PolyDocument polyDoc = value.asDocument(); + assertEquals( "Hello, World!", new 
String( polyDoc.get( new PolyString( "binary" ) ).asBinary().getValue() ) ); + } + +} diff --git a/settings.gradle b/settings.gradle index 583121783f..c270495ba7 100644 --- a/settings.gradle +++ b/settings.gradle @@ -49,6 +49,8 @@ include 'plugins:neo4j-adapter' include 'plugins:cottontail-adapter' include 'plugins:ethereum-adapter' include 'plugins:csv-adapter' +include 'plugins:json-adapter' +include 'plugins:xml-adapter' include 'plugins:mysql-adapter' include 'plugins:postgres-adapter' include 'plugins:monetdb-adapter' diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b4396e8a22..7e5659a89a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -78,14 +78,15 @@ import org.polypheny.db.ResultIterator; import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.AbstractAdapterSettingDirectory; +import org.polypheny.db.adapter.AbstractAdapterSettingString; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.AdapterManager.AdapterInformation; import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.algebra.AlgCollation; @@ -1093,19 +1094,19 @@ void getAvailableSourceColumns( final Context ctx ) { for ( Long adapterId : adapterIds ) { Adapter adapter = AdapterManager.getInstance().getAdapter( adapterId ).orElseThrow(); if ( adapter instanceof DataSource dataSource ) { - for ( Entry> entry : dataSource.getExportedColumns().entrySet() ) { + for ( Entry> entry : dataSource.asRelationalDataSource().getExportedColumns().entrySet() ) { List columnList = new ArrayList<>(); for ( ExportedColumn col : entry.getValue() ) { UiColumnDefinition dbCol = UiColumnDefinition.builder() - .name( col.name ) - .dataType( col.type.getName() ) - .collectionsType( col.collectionsType == null ? "" : col.collectionsType.getName() ) - .nullable( col.nullable ) - .precision( col.length ) - .scale( col.scale ) - .dimension( col.dimension ) - .cardinality( col.cardinality ) - .primary( col.primary ) + .name( col.name() ) + .dataType( col.type().getName() ) + .collectionsType( col.collectionsType() == null ? "" : col.collectionsType().getName() ) + .nullable( col.nullable() ) + .precision( col.length() ) + .scale( col.scale() ) + .dimension( col.dimension() ) + .cardinality( col.cardinality() ) + .primary( col.primary() ) .build(); columnList.add( dbCol ); } @@ -1114,8 +1115,8 @@ void getAvailableSourceColumns( final Context ctx ) { } ctx.json( exportedColumns.toArray( new RelationalResult[0] ) ); return; - } + } } ctx.json( RelationalResult.builder().error( "Could not retrieve exported source fields." ).build() ); @@ -2081,7 +2082,7 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { String body = ""; Map inputStreams = new HashMap<>(); - final AdapterModel a; + final AdapterModel adapterModel; if ( ctx.isMultipartFormData() ) { // collect all files e.g. 
csv files for ( Part part : ctx.req.getParts() ) { @@ -2091,50 +2092,56 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { inputStreams.put( part.getName(), part.getInputStream() ); } } - a = HttpServer.mapper.readValue( body, AdapterModel.class ); + adapterModel = HttpServer.mapper.readValue( body, AdapterModel.class ); } else if ( "application/json".equals( ctx.contentType() ) ) { - a = ctx.bodyAsClass( AdapterModel.class ); + adapterModel = ctx.bodyAsClass( AdapterModel.class ); } else { ctx.status( HttpCode.BAD_REQUEST ); return; } - Map settings = new HashMap<>(); + AdapterTemplate adapterTemplate = AdapterManager.getAdapterTemplate( adapterModel.adapterName, adapterModel.type ); + // This is only used to be able to get the type of property based on the key + Map defaultSettings = adapterTemplate.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); ConnectionMethod method = ConnectionMethod.UPLOAD; - if ( a.settings.containsKey( "method" ) ) { - method = ConnectionMethod.valueOf( a.settings.get( "method" ).toUpperCase() ); + if ( adapterModel.settings.containsKey( "method" ) ) { + method = ConnectionMethod.valueOf( adapterModel.settings.get( "method" ).toUpperCase() ); } - AdapterTemplate adapter = AdapterManager.getAdapterTemplate( a.adapterName, a.type ); - Map allSettings = adapter.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); - for ( Map.Entry entry : a.settings.entrySet() ) { - AbstractAdapterSetting set = allSettings.get( entry.getKey() ); - if ( set == null ) { + Map adapterSettings = new HashMap<>(); + + for ( Map.Entry entry : adapterModel.settings.entrySet() ) { + if ( !defaultSettings.containsKey( entry.getKey() ) ) { + // specified property is not available for this adapter continue; } - if ( set instanceof AbstractAdapterSettingDirectory setting ) { - if ( method == ConnectionMethod.LINK ) { - Exception e = handleLinkFiles( ctx, a, setting, allSettings ); - if ( e != null ) { - ctx.json( RelationalResult.builder().exception( e ).build() ); - return; - } - settings.put( set.name, entry.getValue() ); - } else { - List fileNames = HttpServer.mapper.readValue( entry.getValue(), new TypeReference<>() { - } ); - String directory = handleUploadFiles( inputStreams, fileNames, setting, a ); - settings.put( set.name, directory ); + adapterSettings.put( entry.getKey(), entry.getValue() ); + AbstractAdapterSetting set = defaultSettings.get( entry.getKey() ); + // handle upload + if ( (set instanceof AbstractAdapterSettingDirectory settingDirectory) && method == ConnectionMethod.UPLOAD ) { + List fileNames = HttpServer.mapper.readValue( entry.getValue(), new TypeReference<>() { + } ); + String directory = handleUploadFiles( inputStreams, fileNames, settingDirectory, adapterModel ); + adapterSettings.put( entry.getKey(), directory ); + continue; + } + // handle linking + if ( (set instanceof AbstractAdapterSettingString settingString) && method == ConnectionMethod.LINK ) { + if ( !settingString.name.equals( "directoryName" ) ) { + continue; + } + Exception e = handleLinkFiles( settingString ); + if ( e != null ) { + ctx.json( RelationalResult.builder().exception( e ).build() ); + return; } - } else { - settings.put( set.name, entry.getValue() ); } } - settings.put( "mode", a.mode.toString() ); + adapterSettings.put( "mode", adapterModel.mode.toString() ); - String query = String.format( "ALTER ADAPTERS ADD \"%s\" USING '%s' AS '%s' WITH '%s'", a.name, a.adapterName, a.type, Crud.gson.toJson( settings ) ); + String 
query = String.format( "ALTER ADAPTERS ADD \"%s\" USING '%s' AS '%s' WITH '%s'", adapterModel.name, adapterModel.adapterName, adapterModel.type, Crud.gson.toJson( adapterSettings ) ); QueryLanguage language = QueryLanguage.from( "sql" ); Result res = LanguageCrud.anyQueryResult( QueryContext.builder() @@ -2158,20 +2165,20 @@ public void startAccessRequest( Context ctx ) { } - private Exception handleLinkFiles( Context ctx, AdapterModel a, AbstractAdapterSettingDirectory setting, Map settings ) { - if ( !settings.containsKey( "directoryName" ) ) { - return new GenericRuntimeException( "Security check for access was not performed; id missing." ); - } - Path path = Path.of( settings.get( "directoryName" ).defaultValue ); + private Exception handleLinkFiles( AbstractAdapterSettingString setting ) { + Path path = Path.of( setting.getValue() ); + SecurityManager.getInstance().requestPathAccess( "webui", "webui", path ); if ( !SecurityManager.getInstance().checkPathAccess( path ) ) { return new GenericRuntimeException( "Security check for access was not successful; not enough permissions." ); } - return null; } private static String handleUploadFiles( Map inputStreams, List fileNames, AbstractAdapterSettingDirectory setting, AdapterModel a ) { + if ( fileNames.isEmpty() ) { + throw new GenericRuntimeException( "No file or directory specified for upload!" ); + } for ( String fileName : fileNames ) { setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); }
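For reference, a minimal adapter-creation payload exercising the link path handled above might look as follows (the field names follow the AdapterModel usage visible in this handler; the concrete values, and the JSON framing itself, are illustrative):

    // Illustrative request body for the createAdapter handler (application/json):
    // {
    //   "name": "xmlsource",
    //   "adapterName": "XML",
    //   "type": "SOURCE",
    //   "mode": "EMBEDDED",
    //   "settings": { "method": "link", "directoryName": "/data/xml" }
    // }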