From c3a4b000f8593909debdbde06604c2ab50834e16 Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Mon, 31 Jul 2023 11:28:59 +0200 Subject: [PATCH 01/22] First EventCacheManager draft --- core/build.gradle | 2 + plugins/ethereum-adapter/build.gradle | 1 + .../adapter/ethereum/EthereumEnumerator.java | 6 +- .../db/adapter/ethereum/EthereumMapper.java | 16 +- .../db/adapter/ethereum/EthereumPlugin.java | 234 ++++++++++++++++++ .../db/adapter/ethereum/EthereumSchema.java | 17 +- .../db/adapter/ethereum/EthereumTable.java | 28 ++- .../adapter/ethereum/EventCacheManager.java | 134 ++++++++++ .../db/adapter/ethereum/EventData.java | 50 ++++ .../db/adapter/ethereum/EventDataReader.java | 115 +++++++++ 10 files changed, 591 insertions(+), 12 deletions(-) create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java diff --git a/core/build.gradle b/core/build.gradle index a897c32f20..7b507a52e3 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -68,6 +68,8 @@ dependencies { exclude group: "com.github.spotbugs" } + implementation 'com.squareup.okhttp3:okhttp:4.11.0' + // --- Test Compile --- testImplementation group: "junit", name: "junit", version: junit_version testImplementation group: "org.hamcrest", name: "hamcrest-core", version: hamcrest_core_version // BSD 3-clause diff --git a/plugins/ethereum-adapter/build.gradle b/plugins/ethereum-adapter/build.gradle index ab2c2553e3..2812ece5c7 100644 --- a/plugins/ethereum-adapter/build.gradle +++ b/plugins/ethereum-adapter/build.gradle @@ -10,6 +10,7 @@ dependencies { } // Apache 2.0 // Apache 2.0 + implementation 'org.json:json:20210307' // --- Test Compile --- testImplementation project(path: ":core", configuration: "tests") diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java index a17a6c09b7..7f7e628286 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumEnumerator.java @@ -27,7 +27,7 @@ import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.linq4j.Enumerator; import org.apache.commons.lang3.time.FastDateFormat; - +import org.web3j.abi.datatypes.Event; /** * Enumerator that reads from a Blockchain. 
@@ -58,12 +58,12 @@ class EthereumEnumerator implements Enumerator { private E current; - EthereumEnumerator( String clientUrl, int blocks, AtomicBoolean cancelFlag, boolean stream, String[] filterValues, EthereumMapper mapper, Predicate blockNumberPredicate, RowConverter rowConverter ) { + EthereumEnumerator( String clientUrl, int blocks, AtomicBoolean cancelFlag, boolean stream, String[] filterValues, EthereumMapper mapper, Predicate blockNumberPredicate, RowConverter rowConverter, String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) { this.clientUrl = clientUrl; this.cancelFlag = cancelFlag; this.rowConverter = rowConverter; this.filterValues = filterValues; - this.reader = mapper.makeReader( clientUrl, blocks, blockNumberPredicate ); + this.reader = mapper.makeReader( clientUrl, blocks, blockNumberPredicate, contractAddress, fromBlock, toBlock, event ); this.blocks = blocks; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java index db67e69734..32be5981da 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java @@ -19,18 +19,22 @@ import java.math.BigInteger; import java.util.function.Predicate; import org.web3j.protocol.core.methods.response.EthBlock; +import org.web3j.abi.datatypes.Event; public enum EthereumMapper { BLOCK, - TRANSACTION; + TRANSACTION, + EVENTDATA; static EthereumMapper getMapper( String tableName ) { if ( tableName.equals( "block" ) ) { return BLOCK; + } else if ( tableName.equals( "transaction" ) ) { + return TRANSACTION; } - return TRANSACTION; + return EVENTDATA; } @@ -95,10 +99,14 @@ public String[] map( Object obj ) { } - public BlockReader makeReader( String clientUrl, int blocks, Predicate blockNumberPredicate ) { + public BlockReader makeReader( String clientUrl, int blocks, Predicate blockNumberPredicate, String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) { if ( this == BLOCK ) { return new BlockReader( clientUrl, blocks, blockNumberPredicate ); + } else if ( this == TRANSACTION ) { + return new TransactionReader( clientUrl, blocks, blockNumberPredicate ); } - return new TransactionReader( clientUrl, blocks, blockNumberPredicate ); + return new EventDataReader( clientUrl, blocks, blockNumberPredicate, contractAddress, fromBlock, toBlock, event ); // Event Data; } + + // maybe I will need a new "makeEventReader" } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index e18f0bbe9c..085ded04e8 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -18,14 +18,24 @@ import com.google.common.collect.ImmutableMap; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; import java.math.BigInteger; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.ProtocolException; +import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; + import lombok.Getter; 
import lombok.extern.slf4j.Slf4j; +import org.json.JSONArray; +import org.json.JSONObject; import org.pf4j.Extension; import org.pf4j.Plugin; import org.pf4j.PluginWrapper; @@ -47,6 +57,10 @@ import org.polypheny.db.schema.Table; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; +import org.web3j.abi.TypeReference; +import org.web3j.abi.datatypes.Address; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.datatypes.generated.Uint256; import org.web3j.protocol.Web3j; import org.web3j.protocol.http.HttpService; @@ -93,6 +107,12 @@ public void stop() { @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1) @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true) @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true) + @AdapterSettingString(name = "SmartContractAddress", description = "Address of the smart contract address", defaultValue = "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", position = 4, modifiable = true) // Event Data: Add annotation + @AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable = true) // Event Data: Add annotation + @AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true) + @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true) + @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true) + @AdapterSettingString(name = "AdapterTargetName", description = "Adapter Target Name", defaultValue = "ethereum", position = 6, modifiable = true) public static class EthereumDataSource extends DataSource { private String clientURL; @@ -101,6 +121,14 @@ public static class EthereumDataSource extends DataSource { @Getter private boolean experimentalFiltering; private EthereumSchema currentSchema; + private final String smartContractAddress; + private final String etherscanApiKey; + private final BigInteger fromBlock; + private final BigInteger toBlock; + private final Map eventInputsMap; + private Boolean startCaching; + private String adpaterTargetName; + List events = new ArrayList<>(); // for caching public EthereumDataSource( final int storeId, final String uniqueName, final Map settings ) { @@ -108,6 +136,13 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map setClientURL( settings.get( "ClientUrl" ) ); this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); + this.smartContractAddress = settings.get( "SmartContractAddress" ); // Event Data; Add smartContractAddress to EDataSource + this.etherscanApiKey = settings.get( "EtherscanApiKey" ); + this.fromBlock = new BigInteger( settings.get( "fromBlock" ) ); + this.toBlock = new BigInteger( settings.get( "toBlock" ) ); + this.eventInputsMap = new HashMap<>(); + this.startCaching = Boolean.parseBoolean( settings.get( "Caching" ) ); + this.adpaterTargetName = 
settings.get( "AdapterTargetName" ); createInformationPage(); enableInformationPage(); } @@ -157,6 +192,43 @@ public Map> getExportedColumns() { String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" }; PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR }; + String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" }; + PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR }; + + // Caching: Init own caching class. Start caching with "startCaching (idAdapter, smartContractAddress, wo gecached (in welchen Store))". + // In the background (another Thread) logs are fetched (also see restrictions eth_logs) + // As usual: Schema is still created here. + // Caching: We can define a threshold in which part of the data is inserted into the tables. (use flag for this) + + // Event Data Dynamic Scheme + List eventList = getEventsFromABI( etherscanApiKey, smartContractAddress ); + eventInputsMap.clear(); // clear the map + events.clear(); // clear the map + for ( JSONObject event : eventList ) { + String eventName = event.getString( "name" ); // to match it later with catalogTable.name + JSONArray inputsArray = event.getJSONArray( "inputs" ); + List inputsList = new ArrayList<>(); + List> eventParameters = new ArrayList<>(); + for ( int i = 0; i < inputsArray.length(); i++ ) { + JSONObject inputObject = inputsArray.getJSONObject( i ); + inputsList.add( inputObject ); + // put this into a method (modular) + String type = inputObject.getString( "type" ); + boolean indexed = inputObject.getBoolean( "indexed" ); + if ( type.equals( "address" ) ) { + eventParameters.add( indexed ? new TypeReference
<Address>( true ) {
+                    } : new TypeReference<Address>
( false ) { + } ); + } else if ( type.equals( "uint256" ) ) { + eventParameters.add( indexed ? new TypeReference( true ) { + } : new TypeReference( false ) { + } ); + } + } + eventInputsMap.put( eventName.toLowerCase(), new EventData( eventName, inputsList ) ); + events.add( new Event( eventName, eventParameters ) ); + } + PolyType type = PolyType.VARCHAR; PolyType collectionsType = null; Integer length = 300; @@ -204,6 +276,66 @@ public Map> getExportedColumns() { position++; } map.put( "transaction", transactCols ); + + // Event Data: Creating columns for each event for specified smart contract based on ABI + for ( Map.Entry eventEntry : eventInputsMap.entrySet() ) { + String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name + List inputsList = eventEntry.getValue().getData(); // Get the data + List eventDataCols = new ArrayList<>(); + int inputPosition = 0; + + for ( JSONObject input : inputsList ) { + String inputName = input.getString( "name" ); + PolyType inputType = convertToPolyType( input.getString( "type" ) ); // convert event types to polytype + eventDataCols.add( new ExportedColumn( + inputName, + inputType, + collectionsType, + length, + scale, + dimension, + cardinality, + false, + "public", + eventName, // event name + inputName, + inputPosition, + inputPosition == 0 + ) ); + inputPosition++; + } + + // Adding common columns + for ( int i = 0; i < commonEventColumns.length; i++ ) { + String columnName = commonEventColumns[i]; + PolyType columnType = commonEventTypes[i]; + eventDataCols.add( new ExportedColumn( + columnName, + columnType, + collectionsType, + length, + scale, + dimension, + cardinality, + false, + "public", + eventName, // event name + columnName, + inputPosition, + inputPosition == 0 + ) ); + inputPosition++; + } + + map.put( eventName, eventDataCols ); + } + + // caching + if ( startCaching == Boolean.TRUE ) { + EventCacheManager eventCacheManager = new EventCacheManager( clientURL, 50, smartContractAddress, fromBlock, toBlock, events ); + eventCacheManager.startCaching(); + } + return map; } @@ -269,6 +401,108 @@ protected void createInformationPage() { } } + + protected List getEventsFromABI( String etherscanApiKey, String contractAddress ) { + List eventList = new ArrayList<>(); + try { + URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getabi&address=" + contractAddress + "&apikey=" + etherscanApiKey ); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod( "GET" ); + int responseCode = connection.getResponseCode(); + if ( responseCode == HttpURLConnection.HTTP_OK ) { + BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) ); + String inputLine; + StringBuilder response = new StringBuilder(); + + while ( (inputLine = in.readLine()) != null ) { + response.append( inputLine ); + } + in.close(); + + JSONObject jsonObject = new JSONObject( response.toString() ); + String abi = jsonObject.getString( "result" ); + // Convert ABI string to JSON Array + JSONArray abiArray = new JSONArray( abi ); + for ( int i = 0; i < abiArray.length(); i++ ) { + JSONObject obj = abiArray.getJSONObject( i ); + + // Check if the current object is an event + if ( obj.getString( "type" ).equals( "event" ) ) { + eventList.add( obj ); + } + } + } + + } catch ( MalformedURLException e ) { + throw new RuntimeException( e ); + } catch ( ProtocolException e ) { + throw new RuntimeException( e ); + } catch ( IOException e ) { + throw new 
RuntimeException( e ); + } + + return eventList; + } + + + private PolyType convertToPolyType( String ethereumType ) { + if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { + // Ethereum's uint and int types map to BIGINT in PolyType + return PolyType.BIGINT; + } else if ( ethereumType.startsWith( "bytes" ) || ethereumType.equals( "string" ) || ethereumType.equals( "address" ) ) { + // Ethereum's bytes, string and address types map to VARCHAR in PolyType + return PolyType.VARCHAR; + } else if ( ethereumType.equals( "bool" ) ) { + // Ethereum's bool type maps to BOOLEAN in PolyType + return PolyType.BOOLEAN; + } else { + // If the type is unknown, use VARCHAR as a general type + return PolyType.VARCHAR; + } + } + + + protected String getSmartContractAddress() { + return this.smartContractAddress; + } + + + protected BigInteger getFromBlock() { + return this.fromBlock; + } + + + protected BigInteger getToBlock() { + return this.toBlock; + } + + + protected Event getEventFromCatalogTable( String catalogTableName ) { + if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) { + return null; + } + EventData eventData = eventInputsMap.get( catalogTableName ); + List jsonObjects = eventData.getData(); + List> parameterTypes = new ArrayList<>(); + for ( JSONObject jsonObject : jsonObjects ) { + String type = jsonObject.getString( "type" ); + boolean indexed = jsonObject.getBoolean( "indexed" ); + + if ( type.equals( "address" ) ) { + parameterTypes.add( indexed ? new TypeReference
<Address>( true ) {
+                } : new TypeReference<Address>
( false ) { + } ); + } else if ( type.equals( "uint256" ) ) { + parameterTypes.add( indexed ? new TypeReference( true ) { + } : new TypeReference( false ) { + } ); + } + // ... + } + + return new Event( eventData.getOriginalKey(), parameterTypes ); + } + } } \ No newline at end of file diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java index 676d7ed723..1bc04c8016 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java @@ -61,8 +61,21 @@ public Table createBlockchainTable( CatalogTable catalogTable, List i ).toArray(); - EthereumMapper mapper = catalogTable.name.equals( "block" ) ? EthereumMapper.BLOCK : EthereumMapper.TRANSACTION; - EthereumTable table = new EthereumTable( clientUrl, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, mapper, ethereumDataSource, catalogTable.id ); + EthereumMapper mapper = catalogTable.name.equals( "block" ) ? EthereumMapper.BLOCK : catalogTable.name.equals( "transaction" ) ? EthereumMapper.TRANSACTION : EthereumMapper.EVENTDATA; // Event Data; add EVENTDATA + // each table will get one EthereumTable; send event metadata down here. + EthereumTable table = new EthereumTable( + clientUrl, + AlgDataTypeImpl.proto( fieldInfo.build() ), + fieldTypes, + fields, + mapper, + ethereumDataSource, + catalogTable.id, + ethereumDataSource.getSmartContractAddress(), + ethereumDataSource.getFromBlock(), + ethereumDataSource.getToBlock(), + ethereumDataSource.getEventFromCatalogTable(catalogTable.name) + ); tableMap.put( catalogTable.name, table ); return table; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java index 6c095c2e93..12962be3fd 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java @@ -34,6 +34,7 @@ import org.polypheny.db.schema.FilterableTable; import org.polypheny.db.schema.impl.AbstractTable; import org.polypheny.db.util.Pair; +import org.web3j.abi.datatypes.Event; public class EthereumTable extends AbstractTable implements FilterableTable { @@ -43,6 +44,10 @@ public class EthereumTable extends AbstractTable implements FilterableTable { protected final EthereumDataSource ethereumDataSource; protected final EthereumMapper mapper; protected List fieldTypes; + protected final String contractAddress; + protected final BigInteger fromBlock; + protected final BigInteger toBlock; + protected final Event event; public EthereumTable( @@ -52,7 +57,12 @@ public EthereumTable( int[] fields, EthereumMapper mapper, EthereumDataSource ethereumDataSource, - Long tableId ) { + Long tableId, + String contractAddress, + BigInteger fromBlock, + BigInteger toBlock, + Event event ) { + this.clientUrl = clientUrl; this.protoRowType = protoRowType; this.fieldTypes = fieldTypes; @@ -60,6 +70,10 @@ public EthereumTable( this.ethereumDataSource = ethereumDataSource; this.mapper = mapper; this.tableId = tableId; + this.contractAddress = contractAddress; + this.fromBlock = fromBlock; + this.toBlock = toBlock; + this.event = event; } @@ -104,7 +118,11 @@ public Enumerator enumerator() { 
null, mapper, finalBlockNumberPredicate, - (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) ); + (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) , + contractAddress, + fromBlock, + toBlock, + event); } }; } @@ -119,7 +137,11 @@ public Enumerator enumerator() { null, mapper, finalBlockNumberPredicate, - (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) ); + (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ), + contractAddress, + fromBlock, + toBlock, + event); } }; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java new file mode 100644 index 0000000000..b5639c0c71 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -0,0 +1,134 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.web3j.abi.datatypes.Event; +import org.web3j.protocol.Web3j; +import org.web3j.protocol.core.methods.request.EthFilter; +import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.abi.EventEncoder; +import org.web3j.protocol.http.HttpService; + +public class EventCacheManager { + + private final int batchSizeInBlocks; + private Map> cacheMap; // a cache for each event + private List events; // maintain a list of events + private String smartContractAddress; + private BigInteger fromBlock; + private BigInteger toBlock; + protected final Web3j web3j; + + private boolean isCachingStarted = false; + + + // Create one instance to handle caching (better for load balancing if we have multiple stores) + // EventCacheManager is addressed by the Adapter (with registry method) + // get all the information: adapterId (adapter target name?), threshold, smart contract address, etherscan api key... all the necessary information + public EventCacheManager( String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events ) { + this.batchSizeInBlocks = batchSizeInBlocks; + this.smartContractAddress = smartContractAddress; + this.fromBlock = fromBlock; + this.toBlock = toBlock; + this.cacheMap = new HashMap<>(); + this.events = events; + for ( Event event : events ) { + this.cacheMap.put( event, new ArrayList<>() ); + } + ; + this.web3j = Web3j.build( new HttpService( clientUrl ) ); + } + + + public void startCaching() { + // 1. similiar to getExportedColumn - it only creates a source, but we need one to write it to the store + // 2. 
fetch logs from range x to y (chunk defined by threshold) is reached - addToCache + // 3. write these logs into store - writeToStore + // 4. Keep going until all the logs are written into the stores + System.out.println( "start to cache" ); + BigInteger currentBlock = fromBlock; + + while ( currentBlock.compareTo( toBlock ) <= 0 ) { + BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ); + if ( endBlock.compareTo( toBlock ) > 0 ) { + endBlock = toBlock; + } + + System.out.println( "from-to: " + currentBlock + " to " + endBlock ); + + // for each event fetch logs from block x to block y according to batchSizeInBlocks + for ( Event event : events ) { + addToCache( event, currentBlock, endBlock ); + } + + // just another loop for debugging reasons. I will put it in the first loop later on. + for ( Event event : events ) { + // if size == 0 skip + writeToStore( event, "targetStoreEth" ); // write the event into the store + cacheMap.get( event ).clear(); // clear cache batch + } + + currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers + } + } + + + public synchronized void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) { + // fetch logs from block x to block y + // write it into the cache, so it can be written into the store + EthFilter filter = new EthFilter( + DefaultBlockParameter.valueOf( startBlock ), + DefaultBlockParameter.valueOf( endBlock ), + smartContractAddress + ); + + filter.addSingleTopic( EventEncoder.encode( event ) ); + + try { + List logs = web3j.ethGetLogs( filter ).send().getLogs(); + // Add fetched logs to cache + cacheMap.get( event ).addAll( logs ); + } catch ( IOException e ) { + // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. + } + } + + + private void writeToStore( Event event, String targetStore ) { + // write to targetStore + for ( Event e : events ) { + // write event into tables (see cacheMap > value) + } + + // clear the cache (logs) + cacheMap.get( event ).clear(); + } + + + private void getStreamStatus() { + // return status of process + } + +} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java new file mode 100644 index 0000000000..d932330c56 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java @@ -0,0 +1,50 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.ethereum; + +import java.util.List; +import org.json.JSONObject; + +public class EventData { + + private String originalKey; + private String lowercaseKey; + private List data; + + + public EventData( String originalKey, List data ) { + this.originalKey = originalKey; + this.lowercaseKey = originalKey.toLowerCase(); + this.data = data; + } + + + public String getOriginalKey() { + return originalKey; + } + + + public String getLowercaseKey() { + return lowercaseKey; + } + + + public List getData() { + return data; + } + +} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java new file mode 100644 index 0000000000..7a6058989e --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java @@ -0,0 +1,115 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.ethereum; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Arrays; +import java.util.function.Predicate; +import org.web3j.abi.FunctionReturnDecoder; +import org.web3j.abi.datatypes.Type; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.protocol.core.methods.response.Log; +import org.web3j.protocol.core.methods.request.EthFilter; +import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.TypeReference; +import org.web3j.abi.EventEncoder; + +public class EventDataReader extends BlockReader { + + private List logs; + private int currentLogIndex = 0; + private Event event; + + + EventDataReader( String clientUrl, int blocks, Predicate blockNumberPrecate, String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) { + super( clientUrl, blocks, blockNumberPrecate ); + this.event = event; + + EthFilter filter = new EthFilter( + DefaultBlockParameter.valueOf( fromBlock ), + DefaultBlockParameter.valueOf( toBlock ), + contractAddress + ); + + filter.addSingleTopic( EventEncoder.encode( event ) ); + + try { + logs = web3j.ethGetLogs( filter ).send().getLogs(); // get logs + } catch ( IOException e ) { + // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. 
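            // A minimal sketch of the fallback suggested in the comment above (an
            // assumption, not part of the original patch): initializing `logs` to an
            // empty list would keep readNext() from dereferencing null after a failed
            // RPC call, e.g.:
            //     logs = java.util.Collections.emptyList();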
+ } + } + + + @Override + public String[] readNext() throws IOException { + if ( this.blockReads <= 0 || currentLogIndex >= logs.size() ) { + return null; // no more blocks to read or no more logs to process + } + + EthLog.LogResult logResult = logs.get( currentLogIndex ); + Log log = (Log) logResult.get(); + + currentLogIndex++; // Move to the next log for the next call to readNext() + if ( currentLogIndex >= logs.size() ) { + this.blockReads--; // Decrement blockReads when all logs for the current block have been processed + } + + // Decode the data field of the log + String data = log.getData(); + List decodedData = FunctionReturnDecoder.decode( data, event.getNonIndexedParameters() ); + + // Decode the topics of the log + List topics = log.getTopics(); + topics.remove( 0 ); // The first topic is the event signature, so we skip it + List decodedTopics = new ArrayList<>(); + for ( int i = 0; i < topics.size(); i++ ) { + String topic = topics.get( i ); + TypeReference parameterType = event.getIndexedParameters().get( i ); + Type decodedTopic = FunctionReturnDecoder.decodeIndexedValue( topic, parameterType ); + decodedTopics.add( decodedTopic ); + } + + // Combine the decoded topics and data into a single array + List allDecodedParameters = new ArrayList<>(); + allDecodedParameters.addAll( decodedTopics ); + allDecodedParameters.addAll( decodedData ); + + // Convert the decoded parameters to a String array + String[] result = new String[allDecodedParameters.size()]; + for ( int i = 0; i < allDecodedParameters.size(); i++ ) { + result[i] = allDecodedParameters.get( i ).getValue().toString(); + } + + // Add additional columns + String[] extendedResult = Arrays.copyOf( result, result.length + 7 ); + extendedResult[result.length] = Boolean.toString( log.isRemoved() ); + extendedResult[result.length + 1] = log.getLogIndexRaw(); + extendedResult[result.length + 2] = log.getTransactionIndexRaw(); + extendedResult[result.length + 3] = log.getTransactionHash(); + extendedResult[result.length + 4] = log.getBlockHash(); + extendedResult[result.length + 5] = log.getBlockNumber().toString(); + extendedResult[result.length + 6] = log.getAddress(); + + return extendedResult; + } + +} From ad384d64bdf5023bc56383024748132f2db5a98d Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 2 Aug 2023 14:22:32 +0900 Subject: [PATCH 02/22] rearranged parts of the caching logic, first part of register schema --- .../org/polypheny/db/adapter/DataSource.java | 12 ++ .../db/processing/DataMigratorImpl.java | 2 +- .../db/adapter/ethereum/CachingStatus.java | 30 ++++ .../db/adapter/ethereum/EthereumPlugin.java | 19 +- .../db/adapter/ethereum/EventCache.java | 170 ++++++++++++++++++ .../adapter/ethereum/EventCacheManager.java | 163 +++++++++-------- 6 files changed, 303 insertions(+), 93 deletions(-) create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index 96aea10ac9..f86e90bd7a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -26,6 +26,8 @@ import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import 
org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.DdlManager.FieldInformation; import org.polypheny.db.prepare.Context; import org.polypheny.db.type.PolyType; @@ -93,6 +95,16 @@ public String getDisplayType() { return typeStr; } + + public ColumnTypeInformation toColumnTypeInformation() { + return new ColumnTypeInformation( type, collectionsType, length, scale, dimension, cardinality, nullable ); + + } + + public FieldInformation toFieldInformation(){ + + } + } diff --git a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java index 10fbe8baba..36faafce86 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java +++ b/dbms/src/main/java/org/polypheny/db/processing/DataMigratorImpl.java @@ -638,7 +638,7 @@ public void copyPartitionData( Transaction transaction, CatalogAdapter store, Ca CatalogPrimaryKey primaryKey = Catalog.getInstance().getPrimaryKey( sourceTable.primaryKey ); // Check Lists - List targetColumnPlacements = new LinkedList<>(); + List targetColumnPlacements = new ArrayList<>(); for ( CatalogColumn catalogColumn : columns ) { targetColumnPlacements.add( Catalog.getInstance().getColumnPlacement( store.id, catalogColumn.id ) ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java new file mode 100644 index 0000000000..3c6d2d11e6 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.ethereum; + +public class CachingStatus { + + public float percent; + + public ProcessingState state; + + + public enum ProcessingState { + INITIALIZED, PROCESSING, DONE + } + +} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 085ded04e8..eaa6b00e7f 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -115,6 +115,7 @@ public void stop() { @AdapterSettingString(name = "AdapterTargetName", description = "Adapter Target Name", defaultValue = "ethereum", position = 6, modifiable = true) public static class EthereumDataSource extends DataSource { + public static final String SCHEMA_NAME = "public"; private String clientURL; @Getter private int blocks; @@ -247,7 +248,7 @@ public Map> getExportedColumns() { dimension, cardinality, false, - "public", + SCHEMA_NAME, "block", blockCol, position, @@ -268,7 +269,7 @@ public Map> getExportedColumns() { dimension, cardinality, false, - "public", + SCHEMA_NAME, "transaction", transactCol, position, @@ -296,7 +297,7 @@ public Map> getExportedColumns() { dimension, cardinality, false, - "public", + SCHEMA_NAME, eventName, // event name inputName, inputPosition, @@ -318,7 +319,7 @@ public Map> getExportedColumns() { dimension, cardinality, false, - "public", + SCHEMA_NAME, eventName, // event name columnName, inputPosition, @@ -332,8 +333,10 @@ public Map> getExportedColumns() { // caching if ( startCaching == Boolean.TRUE ) { - EventCacheManager eventCacheManager = new EventCacheManager( clientURL, 50, smartContractAddress, fromBlock, toBlock, events ); - eventCacheManager.startCaching(); + EventCacheManager.getInstance() + .register( getAdapterId(), clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) + .startCaching(); + } return map; @@ -433,10 +436,6 @@ protected List getEventsFromABI( String etherscanApiKey, String cont } } - } catch ( MalformedURLException e ) { - throw new RuntimeException( e ); - } catch ( ProtocolException e ) { - throw new RuntimeException( e ); } catch ( IOException e ) { throw new RuntimeException( e ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java new file mode 100644 index 0000000000..2346afe1eb --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -0,0 +1,170 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.ethereum; + + +import java.io.IOException; +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.adapter.DataSource.ExportedColumn; +import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.DdlManager.FieldInformation; +import org.web3j.abi.EventEncoder; +import org.web3j.abi.datatypes.Event; +import org.web3j.protocol.Web3j; +import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.protocol.core.methods.request.EthFilter; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.protocol.core.methods.response.EthLog.LogResult; +import org.web3j.protocol.http.HttpService; + +@Slf4j +public class EventCache { + + private final int batchSizeInBlocks; + private final Map>> cache = new ConcurrentHashMap<>(); // a cache for each event + private final List events; // maintain a list of events + private final String smartContractAddress; + private final BigInteger fromBlock; + private final BigInteger toBlock; + protected final Web3j web3j; + + public final int adapterId; + private final Map> columns; + + private boolean isCachingStarted = false; + + + // Create one instance to handle caching (better for load balancing if we have multiple stores) + // EventCacheManager is addressed by the Adapter (with registry method) + // get all the information: adapterId (adapter target name?), threshold, smart contract address, etherscan api key... all the necessary information + public EventCache( int adapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> columns ) { + this.adapterId = adapterId; + this.columns = columns; + this.batchSizeInBlocks = batchSizeInBlocks; + this.smartContractAddress = smartContractAddress; + this.fromBlock = fromBlock; + this.toBlock = toBlock; + this.events = events; + events.forEach( event -> this.cache.put( event, new ArrayList<>() ) ); + this.web3j = Web3j.build( new HttpService( clientUrl ) ); + } + + + public void initializeCaching() { + // register table in schema + this.createSchema(); + // start caching + this.startCaching(); + } + + + private void createSchema() { + + Map> columnInformations = columns.entrySet() + .stream() + .collect( + Collectors.toMap( + Entry::getKey, + table -> table.getValue() + .stream() + .map( ExportedColumn::toFieldInformation ) + .collect( Collectors.toList() ) ) ); + + EventCacheManager.getInstance().createTables( adapterId, columnInformations, ); + } + + + + public void startCaching() { + // 1. similiar to getExportedColumn - it only creates a source, but we need one to write it to the store + // 2. fetch logs from range x to y (chunk defined by threshold) is reached - addToCache + // 3. write these logs into store - writeToStore + // 4. 
Keep going until all the logs are written into the stores + log.warn( "start to cache" ); + BigInteger currentBlock = fromBlock; + + while ( currentBlock.compareTo( toBlock ) <= 0 ) { + BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ); + if ( endBlock.compareTo( toBlock ) > 0 ) { + endBlock = toBlock; + } + + System.out.println( "from-to: " + currentBlock + " to " + endBlock ); + + // for each event fetch logs from block x to block y according to batchSizeInBlocks + for ( Event event : events ) { + addToCache( event, currentBlock, endBlock ); + } + + // just another loop for debugging reasons. I will put it in the first loop later on. + for ( Event event : events ) { + // if size == 0 skip + writeToStore( event, "targetStoreEth" ); // write the event into the store + cache.get( event ).clear(); // clear cache batch + } + + currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers + } + } + + + public synchronized void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) { + // fetch logs from block x to block y + // write it into the cache, so it can be written into the store + EthFilter filter = new EthFilter( + DefaultBlockParameter.valueOf( startBlock ), + DefaultBlockParameter.valueOf( endBlock ), + smartContractAddress + ); + + filter.addSingleTopic( EventEncoder.encode( event ) ); + + try { + List> logs = web3j.ethGetLogs( filter ).send().getLogs().stream().map( log -> (LogResult) log ).collect( Collectors.toList() ); + // Add fetched logs to cache + cache.get( event ).addAll( logs ); + } catch ( IOException e ) { + // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. + } + } + + + private void writeToStore( Event event, String targetStore ) { + // write to targetStore + for ( Event e : events ) { + // write event into tables (see cacheMap > value) + } + + // clear the cache (logs) + cache.get( event ).clear(); + } + + + public CachingStatus getStatus() { + throw new NotImplementedException(); + } + +} + diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index b5639c0c71..7e3f5d1b98 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -16,119 +16,118 @@ package org.polypheny.db.adapter.ethereum; -import java.io.IOException; import java.math.BigInteger; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; - +import java.util.Map.Entry; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource.ExportedColumn; +import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.Catalog.ConstraintType; +import org.polypheny.db.catalog.Catalog.PlacementType; +import org.polypheny.db.catalog.exceptions.EntityAlreadyExistsException; +import org.polypheny.db.catalog.exceptions.GenericCatalogException; +import org.polypheny.db.catalog.exceptions.UnknownColumnException; +import org.polypheny.db.catalog.exceptions.UnknownDatabaseException; +import 
org.polypheny.db.catalog.exceptions.UnknownPartitionTypeException; +import org.polypheny.db.catalog.exceptions.UnknownSchemaException; +import org.polypheny.db.catalog.exceptions.UnknownUserException; +import org.polypheny.db.ddl.DdlManager; +import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation; +import org.polypheny.db.ddl.DdlManager.ConstraintInformation; +import org.polypheny.db.ddl.DdlManager.FieldInformation; +import org.polypheny.db.ddl.exception.ColumnNotExistsException; +import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.transaction.TransactionException; +import org.polypheny.db.transaction.TransactionManager; import org.web3j.abi.datatypes.Event; -import org.web3j.protocol.Web3j; -import org.web3j.protocol.core.methods.request.EthFilter; -import org.web3j.protocol.core.DefaultBlockParameter; -import org.web3j.protocol.core.methods.response.EthLog; -import org.web3j.abi.EventEncoder; -import org.web3j.protocol.http.HttpService; +@Slf4j public class EventCacheManager { - private final int batchSizeInBlocks; - private Map> cacheMap; // a cache for each event - private List events; // maintain a list of events - private String smartContractAddress; - private BigInteger fromBlock; - private BigInteger toBlock; - protected final Web3j web3j; + private static EventCacheManager INSTANCE = null; - private boolean isCachingStarted = false; + private final TransactionManager transactionManager; + // concurrent map, which maintains multiple caches, which correspond to the adapter which requested the caches + public Map caches = new ConcurrentHashMap<>(); - // Create one instance to handle caching (better for load balancing if we have multiple stores) - // EventCacheManager is addressed by the Adapter (with registry method) - // get all the information: adapterId (adapter target name?), threshold, smart contract address, etherscan api key... all the necessary information - public EventCacheManager( String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events ) { - this.batchSizeInBlocks = batchSizeInBlocks; - this.smartContractAddress = smartContractAddress; - this.fromBlock = fromBlock; - this.toBlock = toBlock; - this.cacheMap = new HashMap<>(); - this.events = events; - for ( Event event : events ) { - this.cacheMap.put( event, new ArrayList<>() ); + + /** + * This gets called only once at the start of Polypheny to create a single instance of the manager + * after that the method will throw and the {@link #getInstance()} method is used to retrieve the initially create instance. + * + * @param manager is used to create new transactions, which are required to create new queries. + */ + public static synchronized EventCacheManager getAndSet( TransactionManager manager ) { + if ( INSTANCE != null ) { + throw new RuntimeException( String.format( "The %s was already set.", EventCacheManager.class.getSimpleName() ) ); } - ; - this.web3j = Web3j.build( new HttpService( clientUrl ) ); + INSTANCE = new EventCacheManager( manager ); + return INSTANCE; } - public void startCaching() { - // 1. similiar to getExportedColumn - it only creates a source, but we need one to write it to the store - // 2. fetch logs from range x to y (chunk defined by threshold) is reached - addToCache - // 3. write these logs into store - writeToStore - // 4. 
Keep going until all the logs are written into the stores - System.out.println( "start to cache" ); - BigInteger currentBlock = fromBlock; - - while ( currentBlock.compareTo( toBlock ) <= 0 ) { - BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ); - if ( endBlock.compareTo( toBlock ) > 0 ) { - endBlock = toBlock; - } + public static EventCacheManager getInstance() { + if ( INSTANCE == null ) { + throw new RuntimeException( String.format( "The %s was not correctly initialized.", EventCacheManager.class.getSimpleName() ) ); + } + return INSTANCE; + } - System.out.println( "from-to: " + currentBlock + " to " + endBlock ); - // for each event fetch logs from block x to block y according to batchSizeInBlocks - for ( Event event : events ) { - addToCache( event, currentBlock, endBlock ); - } + // Create one instance to handle caching (better for load balancing if we have multiple stores) + // EventCacheManager is addressed by the Adapter (with registry method) + // get all the information: adapterId (adapter target name?), threshold, smart contract address, etherscan api key... all the necessary information + private EventCacheManager( TransactionManager transactionManager ) { + this.transactionManager = transactionManager; + } - // just another loop for debugging reasons. I will put it in the first loop later on. - for ( Event event : events ) { - // if size == 0 skip - writeToStore( event, "targetStoreEth" ); // write the event into the store - cacheMap.get( event ).clear(); // clear cache batch - } - currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers - } + public EventCache register( int adapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> map ) { + EventCache cache = new EventCache( adapterId, clientUrl, batchSizeInBlocks, smartContractAddress, fromBlock, toBlock, events, map ); + this.caches.put( adapterId, cache ); + return cache; } - public synchronized void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) { - // fetch logs from block x to block y - // write it into the cache, so it can be written into the store - EthFilter filter = new EthFilter( - DefaultBlockParameter.valueOf( startBlock ), - DefaultBlockParameter.valueOf( endBlock ), - smartContractAddress - ); + @Nullable + public EventCache getCache( int adapterId ) { + return caches.get( adapterId ); + } - filter.addSingleTopic( EventEncoder.encode( event ) ); + void createTables( int sourceAdapterId, Map> tableInformations, int adapterId ){ try { - List logs = web3j.ethGetLogs( filter ).send().getLogs(); - // Add fetched logs to cache - cacheMap.get( event ).addAll( logs ); - } catch ( IOException e ) { - // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. 
- } - } - + long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; + Transaction transaction = transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" ); + DataStore store = AdapterManager.getInstance().getStore(adapterId); + for ( Entry> table : tableInformations.entrySet() ) { + ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey()+"primary", ConstraintType.PRIMARY, List.of( table.getValue().get( 0 ).name ) ); // todo atm first column is primary, we should adjust that + DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of(primaryConstraint), false, List.of(store), PlacementType.AUTOMATIC, transaction.createStatement() ); + } - private void writeToStore( Event event, String targetStore ) { - // write to targetStore - for ( Event e : events ) { - // write event into tables (see cacheMap > value) + try { + transaction.commit(); + } catch ( TransactionException e ) { + throw new RuntimeException( e ); + } + } catch ( EntityAlreadyExistsException | ColumnNotExistsException | UnknownPartitionTypeException | UnknownColumnException | PartitionGroupNamesNotUniqueException | UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) { + throw new RuntimeException( e ); } - // clear the cache (logs) - cacheMap.get( event ).clear(); } - private void getStreamStatus() { + private Map getAllStreamStatus() { // return status of process + return caches.values().stream().collect( Collectors.toMap( c -> c.adapterId, EventCache::getStatus ) ); } } From c2bed895bcbc99c30de0ddbf59fba844965b8bbc Mon Sep 17 00:00:00 2001 From: datomo Date: Wed, 2 Aug 2023 20:44:48 +0900 Subject: [PATCH 03/22] added boilerplate for insert into table --- .../org/polypheny/db/adapter/DataSource.java | 3 +- .../org/polypheny/db/tools/AlgBuilder.java | 97 ++++++++++++++++--- .../db/adapter/ethereum/EthereumPlugin.java | 23 +++-- .../db/adapter/ethereum/EventCache.java | 33 ++----- .../adapter/ethereum/EventCacheManager.java | 78 ++++++++++++--- 5 files changed, 179 insertions(+), 55 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index f86e90bd7a..01d4da6d15 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -23,6 +23,7 @@ import lombok.AllArgsConstructor; import lombok.Getter; import org.pf4j.ExtensionPoint; +import org.polypheny.db.catalog.Catalog.Collation; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; @@ -102,7 +103,7 @@ public ColumnTypeInformation toColumnTypeInformation() { } public FieldInformation toFieldInformation(){ - + return new FieldInformation( name, toColumnTypeInformation(), PolyType.STRING_TYPES.contains( type ) ? 
Collation.getDefaultCollation() : null, null, physicalPosition ); } } diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index 87a5bf4ec8..c46d14687f 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -34,17 +34,60 @@ package org.polypheny.db.tools; +import static org.polypheny.db.util.Static.RESOURCE; + import com.google.common.base.Preconditions; -import com.google.common.collect.*; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import java.math.BigDecimal; +import java.util.AbstractList; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Deque; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.Getter; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.linq4j.function.Experimental; import org.bson.BsonValue; -import org.polypheny.db.algebra.*; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgDistribution; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.constant.SemiJoinType; -import org.polypheny.db.algebra.core.*; +import org.polypheny.db.algebra.core.Aggregate; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.AlgFactories.ScanFactory; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.Filter; +import org.polypheny.db.algebra.core.Intersect; +import org.polypheny.db.algebra.core.Join; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.Match; +import org.polypheny.db.algebra.core.Minus; +import org.polypheny.db.algebra.core.Modify.Operation; +import org.polypheny.db.algebra.core.Project; +import org.polypheny.db.algebra.core.Scan; +import org.polypheny.db.algebra.core.SemiJoin; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; @@ -53,6 +96,7 @@ import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject; import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; import org.polypheny.db.algebra.logical.relational.LogicalFilter; +import org.polypheny.db.algebra.logical.relational.LogicalModify; import org.polypheny.db.algebra.logical.relational.LogicalProject; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.OperatorName; @@ -63,8 +107,24 @@ import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Operator; -import org.polypheny.db.plan.*; 
-import org.polypheny.db.rex.*; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptPredicateList; +import org.polypheny.db.plan.AlgOptSchema; +import org.polypheny.db.plan.AlgOptTable; +import org.polypheny.db.plan.AlgOptUtil; +import org.polypheny.db.plan.Context; +import org.polypheny.db.plan.Contexts; +import org.polypheny.db.prepare.Prepare.CatalogReader; +import org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexCorrelVariable; +import org.polypheny.db.rex.RexExecutor; +import org.polypheny.db.rex.RexInputRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexShuttle; +import org.polypheny.db.rex.RexSimplify; +import org.polypheny.db.rex.RexUtil; import org.polypheny.db.runtime.Hook; import org.polypheny.db.runtime.PolyCollections.PolyDictionary; import org.polypheny.db.schema.ModelTrait; @@ -72,17 +132,21 @@ import org.polypheny.db.schema.graph.PolyNode; import org.polypheny.db.transaction.Statement; import org.polypheny.db.type.PolyType; -import org.polypheny.db.util.*; +import org.polypheny.db.util.DateString; +import org.polypheny.db.util.Holder; +import org.polypheny.db.util.ImmutableBitSet; +import org.polypheny.db.util.ImmutableIntList; +import org.polypheny.db.util.ImmutableNullableList; +import org.polypheny.db.util.Litmus; +import org.polypheny.db.util.NlsString; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.TimeString; +import org.polypheny.db.util.TimestampString; +import org.polypheny.db.util.Util; +import org.polypheny.db.util.ValidatorUtil; import org.polypheny.db.util.mapping.Mapping; import org.polypheny.db.util.mapping.Mappings; -import javax.annotation.Nonnull; -import java.math.BigDecimal; -import java.util.*; -import java.util.stream.Collectors; - -import static org.polypheny.db.util.Static.RESOURCE; - /** * Builder for relational expressions. @@ -2501,6 +2565,13 @@ public void clear() { } + public AlgBuilder insert( AlgOptTable table ) { + LogicalModify modify = LogicalModify.create( table, (CatalogReader) algOptSchema, stack.pop().alg, Operation.INSERT, null, null, false ); + stack.add( new Frame( modify ) ); + return this; + } + + /** * Information necessary to create a call to an aggregate function. 
* diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index eaa6b00e7f..eaf2a0248b 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -23,15 +23,12 @@ import java.io.InputStreamReader; import java.math.BigInteger; import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.ProtocolException; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; - import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.json.JSONArray; @@ -46,9 +43,12 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.catalog.Adapter; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogAdapter; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.UnknownAdapterException; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; @@ -70,6 +70,8 @@ public class EthereumPlugin extends Plugin { public static final String ADAPTER_NAME = "ETHEREUM"; + public static final String HIDDEN_PREFIX = "$hidden$"; + /** * Constructor to be used by plugin manager for plugin instantiation. @@ -129,6 +131,7 @@ public static class EthereumDataSource extends DataSource { private final Map eventInputsMap; private Boolean startCaching; private String adpaterTargetName; + @Getter List events = new ArrayList<>(); // for caching @@ -333,9 +336,15 @@ public Map> getExportedColumns() { // caching if ( startCaching == Boolean.TRUE ) { - EventCacheManager.getInstance() - .register( getAdapterId(), clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) - .startCaching(); + try { + CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( "hsqldb" ); // todo atm we use just the default store to cache + EventCacheManager.getInstance() + .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) + .startCaching(); + } catch ( UnknownAdapterException e ) { + throw new RuntimeException( e ); + } + } @@ -444,7 +453,7 @@ protected List getEventsFromABI( String etherscanApiKey, String cont } - private PolyType convertToPolyType( String ethereumType ) { + static PolyType convertToPolyType( String ethereumType ) { if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { // Ethereum's uint and int types map to BIGINT in PolyType return PolyType.BIGINT; diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index 2346afe1eb..a5f9ea7b61 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -22,13 +22,11 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import 
java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.NotImplementedException;
 import org.polypheny.db.adapter.DataSource.ExportedColumn;
-import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation;
 import org.polypheny.db.ddl.DdlManager.FieldInformation;
 import org.web3j.abi.EventEncoder;
 import org.web3j.abi.datatypes.Event;
@@ -42,6 +40,7 @@
 @Slf4j
 public class EventCache {

+    private final int batchSizeInBlocks;
     private final Map>> cache = new ConcurrentHashMap<>(); // a cache for each event
     private final List events; // maintain a list of events
@@ -50,17 +49,17 @@ public class EventCache {
     private final BigInteger toBlock;

     protected final Web3j web3j;
-    public final int adapterId;
+    public final int sourceAdapterId;
     private final Map> columns;
-
-    private boolean isCachingStarted = false;
+    private final int targetAdapterId;

     // Create one instance to handle caching (better for load balancing if we have multiple stores)
     // EventCacheManager is addressed by the Adapter (with registry method)
     // get all the information: adapterId (adapter target name?), threshold, smart contract address, etherscan api key... all the necessary information
-    public EventCache( int adapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> columns ) {
-        this.adapterId = adapterId;
+    public EventCache( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> columns ) {
+        this.sourceAdapterId = sourceAdapterId;
+        this.targetAdapterId = targetAdapterId;
         this.columns = columns;
         this.batchSizeInBlocks = batchSizeInBlocks;
         this.smartContractAddress = smartContractAddress;
@@ -81,22 +80,20 @@ public void initializeCaching() {

     private void createSchema() {
-
         Map> columnInformations = columns.entrySet()
                 .stream()
                 .collect(
                         Collectors.toMap(
-                                Entry::getKey,
+                                table -> EthereumPlugin.HIDDEN_PREFIX + table.getKey(), // we prepend this to hide the table from the user
                                 table -> table.getValue()
                                         .stream()
                                         .map( ExportedColumn::toFieldInformation )
                                         .collect( Collectors.toList() ) ) );

-        EventCacheManager.getInstance().createTables( adapterId, columnInformations, );
+        EventCacheManager.getInstance().createTables( sourceAdapterId, columnInformations, targetAdapterId );
     }

-
     public void startCaching() {
         // 1. similar to getExportedColumn - it only creates a source, but we need one to write it to the store
         // 2. fetch logs from range x to y (chunk defined by threshold) is reached - addToCache
@@ -111,7 +108,7 @@ public void startCaching() {
                 endBlock = toBlock;
             }

-            System.out.println( "from-to: " + currentBlock + " to " + endBlock );
+            log.warn( "from-to: " + currentBlock + " to " + endBlock );

             // for each event fetch logs from block x to block y according to batchSizeInBlocks
             for ( Event event : events ) {
@@ -121,7 +118,7 @@
             // just another loop for debugging reasons. I will put it in the first loop later on.
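            // Editor's illustration (values taken from the adapter's default settings, not part of
            // the original patch): with fromBlock=17669045, toBlock=17669155 and a batchSizeInBlocks
            // of 50, the loop above computes endBlock=17669095 on its first pass and caps the final
            // pass at toBlock=17669155.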
for ( Event event : events ) {
-                // if size == 0 skip
-                writeToStore( event, "targetStoreEth" ); // write the event into the store
+                EventCacheManager.getInstance().writeToStore( event, null ); // write the event into the store // todo add table name
                 cache.get( event ).clear(); // clear cache batch
             }
@@ -151,16 +148,6 @@ public synchronized void addToCache( Event event, BigInte
     }

-    private void writeToStore( Event event, String targetStore ) {
-        // write to targetStore
-        for ( Event e : events ) {
-            // write event into tables (see cacheMap > value)
-        }
-
-        // clear the cache (logs)
-        cache.get( event ).clear();
-    }
-
     public CachingStatus getStatus() {
         throw new NotImplementedException();
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 7e3f5d1b98..5ce0df600d 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -24,9 +24,15 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;
+import org.polypheny.db.PolyImplementation;
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataSource.ExportedColumn;
 import org.polypheny.db.adapter.DataStore;
+import org.polypheny.db.algebra.AlgNode;
+import org.polypheny.db.algebra.AlgRoot;
+import org.polypheny.db.algebra.constant.Kind;
+import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.algebra.type.AlgDataTypeField;
 import org.polypheny.db.catalog.Catalog;
 import org.polypheny.db.catalog.Catalog.ConstraintType;
 import org.polypheny.db.catalog.Catalog.PlacementType;
@@ -38,11 +44,15 @@
 import org.polypheny.db.catalog.exceptions.UnknownSchemaException;
 import org.polypheny.db.catalog.exceptions.UnknownUserException;
 import org.polypheny.db.ddl.DdlManager;
-import org.polypheny.db.ddl.DdlManager.ColumnTypeInformation;
 import org.polypheny.db.ddl.DdlManager.ConstraintInformation;
 import org.polypheny.db.ddl.DdlManager.FieldInformation;
 import org.polypheny.db.ddl.exception.ColumnNotExistsException;
 import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException;
+import org.polypheny.db.plan.AlgOptTable;
+import org.polypheny.db.rex.RexDynamicParam;
+import org.polypheny.db.schema.PolyphenyDbSchema.TableEntry;
+import org.polypheny.db.tools.AlgBuilder;
+import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.Transaction;
 import org.polypheny.db.transaction.TransactionException;
 import org.polypheny.db.transaction.TransactionManager;
@@ -90,9 +100,9 @@ private EventCacheManager( TransactionManager transactionManager ) {
     }

-    public EventCache register( int adapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> map ) {
-        EventCache cache = new EventCache( adapterId, clientUrl, batchSizeInBlocks, smartContractAddress, fromBlock, toBlock, events, map );
-        this.caches.put( adapterId, cache );
+    public EventCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> map ) {
+        EventCache cache = new EventCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks,
smartContractAddress, fromBlock, toBlock, events, map ); + this.caches.put( sourceAdapterId, cache ); return cache; } @@ -103,14 +113,14 @@ public EventCache getCache( int adapterId ) { } - void createTables( int sourceAdapterId, Map> tableInformations, int adapterId ){ + void createTables( int sourceAdapterId, Map> tableInformations, int targetAdapterId ) { try { long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; - Transaction transaction = transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" ); - DataStore store = AdapterManager.getInstance().getStore(adapterId); + Transaction transaction = getTransaction(); + DataStore store = AdapterManager.getInstance().getStore( targetAdapterId ); for ( Entry> table : tableInformations.entrySet() ) { - ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey()+"primary", ConstraintType.PRIMARY, List.of( table.getValue().get( 0 ).name ) ); // todo atm first column is primary, we should adjust that - DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of(primaryConstraint), false, List.of(store), PlacementType.AUTOMATIC, transaction.createStatement() ); + ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( table.getValue().get( 0 ).name ) ); // todo atm first column is primary, we should adjust that + DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, transaction.createStatement() ); } try { @@ -118,16 +128,62 @@ void createTables( int sourceAdapterId, Map> tabl } catch ( TransactionException e ) { throw new RuntimeException( e ); } - } catch ( EntityAlreadyExistsException | ColumnNotExistsException | UnknownPartitionTypeException | UnknownColumnException | PartitionGroupNamesNotUniqueException | UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) { + } catch ( EntityAlreadyExistsException | ColumnNotExistsException | UnknownPartitionTypeException | UnknownColumnException | PartitionGroupNamesNotUniqueException | UnknownSchemaException e ) { throw new RuntimeException( e ); } } + private Transaction getTransaction() { + try { + Transaction transaction = transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" ); + return transaction; + } catch ( UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) { + throw new RuntimeException( e ); + } + + } + + + void writeToStore( Event event, String tableName ) { + // create fresh transaction + Transaction transaction = getTransaction(); + Statement statement = transaction.createStatement(); + + // use an AlgBuilder to create an algebra representation of the insert query + AlgBuilder builder = AlgBuilder.create( statement ); + + TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); + + AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); + builder.values( rowType ); + + // we use a project with dynamic parameters, so we can re-use it + builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ) ); + + builder.insert( (AlgOptTable) table.getTable() ); + // todo 
we should re-use this for all batches + AlgNode node = builder.build(); + AlgRoot root = AlgRoot.of( node, Kind.INSERT ); + + // add dynamic parameters to context + int i = 0; + for ( AlgDataTypeField field : rowType.getFieldList() ) { + statement.getDataContext().addParameterValues( field.getIndex(), field.getType(), List.of( event.getIndexedParameters().get( i++ ).toString() ) ); // at the moment we only add one row at a time, could refactor to add the whole batch + } + + // execute the transaction + PolyImplementation implementation = statement.getQueryProcessor().prepareQuery( root, false ); + implementation.getRows( statement, -1 ); + + } + + + private Map getAllStreamStatus() { // return status of process - return caches.values().stream().collect( Collectors.toMap( c -> c.adapterId, EventCache::getStatus ) ); + return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, EventCache::getStatus ) ); } } From c07529b671de0cdfe928a859ec88567273bbaece Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Wed, 2 Aug 2023 15:56:50 +0200 Subject: [PATCH 04/22] Solve conflicts --- .../db/adapter/ethereum/EthereumPlugin.java | 19 +++++++++++--- .../adapter/ethereum/EventCacheManager.java | 25 +++++++++++++++++++ 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index eaa6b00e7f..25de70469c 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -127,8 +127,11 @@ public static class EthereumDataSource extends DataSource { private final BigInteger fromBlock; private final BigInteger toBlock; private final Map eventInputsMap; - private Boolean startCaching; + private Boolean cashing; private String adpaterTargetName; + private Boolean isCachingStarted = false; + private EventCacheManager eventCacheManager; + List events = new ArrayList<>(); // for caching @@ -142,7 +145,7 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map this.fromBlock = new BigInteger( settings.get( "fromBlock" ) ); this.toBlock = new BigInteger( settings.get( "toBlock" ) ); this.eventInputsMap = new HashMap<>(); - this.startCaching = Boolean.parseBoolean( settings.get( "Caching" ) ); + this.cashing = Boolean.parseBoolean( settings.get( "Caching" ) ); this.adpaterTargetName = settings.get( "AdapterTargetName" ); createInformationPage(); enableInformationPage(); @@ -332,13 +335,23 @@ public Map> getExportedColumns() { } // caching + // David if ( startCaching == Boolean.TRUE ) { EventCacheManager.getInstance() .register( getAdapterId(), clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) .startCaching(); - } + // mine + /*if ( cashing == Boolean.TRUE ) { + if ( eventCacheManager == null ) { + eventCacheManager = new EventCacheManager( clientURL, 50, smartContractAddress, fromBlock, toBlock, events ); + } + if ( !eventCacheManager.hasStartedCaching() ) { + eventCacheManager.startCaching(); + } + }*/ + return map; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 7e3f5d1b98..b099904487 100644 --- 
a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -74,6 +74,7 @@ public static synchronized EventCacheManager getAndSet( TransactionManager manag
     }

+
     public static EventCacheManager getInstance() {
         if ( INSTANCE == null ) {
             throw new RuntimeException( String.format( "The %s was not correctly initialized.", EventCacheManager.class.getSimpleName() ) );
@@ -81,6 +82,25 @@ public static EventCacheManager getInstance() {
         return INSTANCE;
     }

+    /*
+    Mine
+    public void startCaching() {
+        // 1. similar to getExportedColumn - it only creates a source, but we need one to write it to the store
+        // 2. fetch logs from range x to y (chunk defined by threshold) is reached - addToCache
+        // 3. write these logs into store - writeToStore
+        // 4. Keep going until all the logs are written into the stores
+        System.out.println( "start to cache" );
+        isCachingStarted = true;
+        BigInteger currentBlock = fromBlock;
+
+        while ( currentBlock.compareTo( toBlock ) <= 0 ) {
+            BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
+            if ( endBlock.compareTo( toBlock ) > 0 ) {
+                endBlock = toBlock;
+            }
+        }
+    }*/
+

     // Create one instance to handle caching (better for load balancing if we have multiple stores)
     // EventCacheManager is addressed by the Adapter (with registry method)
@@ -130,4 +150,9 @@ private Map getAllStreamStatus() {
         return caches.values().stream().collect( Collectors.toMap( c -> c.adapterId, EventCache::getStatus ) );
     }

+
+    /*public boolean hasStartedCaching() {
+        return isCachingStarted;
+    }*/
+
 }

From 3cd940692d08064447a4ca67ef9b37c677d927cc Mon Sep 17 00:00:00 2001
From: Tunc Polat 
Date: Sat, 5 Aug 2023 01:17:19 +0200
Subject: [PATCH 05/22] Fix (temp) createColumnDefinition in AbstractJdbcStore
 and remove block and transaction columns inside caching

---
 .../java/org/polypheny/db/adapter/ethereum/EventCache.java  | 2 ++
 .../polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java | 5 ++++-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
index dcb89a8cb5..cdb49212de 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
@@ -82,6 +82,8 @@ public void initializeCaching() {
     // In this method, we create the appropriate schemas and tables in the catalog. (see also createTable)
     private void createSchema() {
+        columns.remove("block");
+        columns.remove("transaction"); // todo: block and trx columns are also included. Remove?
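        // Editor's illustration (hypothetical event name, not part of the patch): for an ABI event
        // table named "transfer", the mapping below yields an entry whose key is HIDDEN_PREFIX plus
        // the table name, i.e. "$hidden$transfer", so the cache table is created in the catalog but
        // hidden from the user.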
Map> columnInformations = columns.entrySet()
                 .stream()
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
index 2c00eb4f27..7dd4a4f239 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
@@ -228,8 +228,11 @@ protected void createColumnDefinition( CatalogColumn catalogColumn, StringBuilde
         } else if ( catalogColumn.collectionsType == PolyType.MAP ) {
             builder.append( getTypeString( PolyType.ARRAY ) );
         } else {
+            String typeString = getTypeString( catalogColumn.type );
             builder.append( " " ).append( getTypeString( catalogColumn.type ) );
-            if ( catalogColumn.length != null ) {
+            if ( catalogColumn.length != null &&
+                    !typeString.equalsIgnoreCase("BIGINT") &&
+                    !typeString.equalsIgnoreCase("BOOLEAN")) {
                 builder.append( "(" ).append( catalogColumn.length );
                 if ( catalogColumn.scale != null ) {
                     builder.append( "," ).append( catalogColumn.scale );

From fcbf986687353b10b7485fcc36942bb7ea7edf31 Mon Sep 17 00:00:00 2001
From: Tunc Polat 
Date: Sat, 5 Aug 2023 15:13:57 +0200
Subject: [PATCH 06/22] Remove fix in AbstractJdbcStore. Fix with getLengthForType

---
 plugins/ethereum-adapter/build.gradle              |  1 +
 .../db/adapter/ethereum/EthereumPlugin.java        | 15 +++++++++++++--
 .../polypheny/db/adapter/ethereum/EventCache.java  |  7 +++++--
 .../db/adapter/ethereum/EventCacheManager.java     | 11 ++++++++---
 .../db/adapter/jdbc/stores/AbstractJdbcStore.java  |  5 +----
 5 files changed, 28 insertions(+), 11 deletions(-)

diff --git a/plugins/ethereum-adapter/build.gradle b/plugins/ethereum-adapter/build.gradle
index 2812ece5c7..37c67c847f 100644
--- a/plugins/ethereum-adapter/build.gradle
+++ b/plugins/ethereum-adapter/build.gradle
@@ -2,6 +2,7 @@ group "org.polypheny"

 dependencies {
+    implementation project(path: ':core')
     compileOnly project(":core")
     // Apache 2.0

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index f7ccbfff4b..1a61e0eeb4 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -195,6 +195,7 @@ public void truncate( Context context, CatalogTable table ) {
         throw new RuntimeException( "Blockchain adapter does not support truncate" );
     }

+    // Because the EthereumAdapter is a source, Polypheny will always call this method to ask what the structure of this adapter is.
@Override public Map> getExportedColumns() { @@ -268,6 +269,7 @@ public Map> getExportedColumns() { } map.put( "block", blockCols ); + List transactCols = new ArrayList<>(); position = 0; for ( String transactCol : transactionColumns ) { @@ -303,7 +305,7 @@ public Map> getExportedColumns() { inputName, inputType, collectionsType, - length, + getLengthForType(inputType), scale, dimension, cardinality, @@ -325,7 +327,7 @@ public Map> getExportedColumns() { columnName, columnType, collectionsType, - length, + getLengthForType(columnType), scale, dimension, cardinality, @@ -367,6 +369,15 @@ public Map> getExportedColumns() { return map; } + private Integer getLengthForType(PolyType type) { + switch (type) { + case VARCHAR: + return 300; + default: + return null; + } + } + @Override public boolean prepare( PolyXid xid ) { diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index cdb49212de..17740b0346 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -118,8 +118,11 @@ public void startCaching() { // just another loop for debugging reasons. I will put it in the first loop later on. for ( Event event : events ) { - // if size == 0 skip - EventCacheManager.getInstance().writeToStore( event, null ); // write the event into the store // todo add table name // (T): Question -> e.g "delegateChanged"? + if (cache.get( event ).size() == 0) { + continue; + } + String tableName = event.getName().toLowerCase(); + EventCacheManager.getInstance().writeToStore( event, tableName ); // write the event into the store // todo add table name // (T): Question -> e.g "delegateChanged"? cache.get( event ).clear(); // clear cache batch } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 1b144378ad..7091d8bbfd 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -17,6 +17,7 @@ package org.polypheny.db.adapter.ethereum; import java.math.BigInteger; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -48,6 +49,7 @@ import org.polypheny.db.ddl.DdlManager.FieldInformation; import org.polypheny.db.ddl.exception.ColumnNotExistsException; import org.polypheny.db.ddl.exception.PartitionGroupNamesNotUniqueException; +import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.schema.PolyphenyDbSchema.TableEntry; @@ -177,11 +179,14 @@ void writeToStore( Event event, String tableName ) { // It abstracts the complexity of building this tree, allowing for the creation of even complex expressions in a more manageable way. 
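        // Editor's sketch (grounded in the calls below) of the tree this method assembles, bottom-up:
        //
        //   Values( rowType )                      <- carries only the row type, no literal rows
        //     -> Project( ?0, ?1, ... )            <- one RexDynamicParam per column
        //       -> LogicalModify( INSERT, table )  <- added by builder.insert( ... )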
AlgBuilder builder = AlgBuilder.create( statement ); - TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); + // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); + AlgOptSchema algOptSchema = (AlgOptSchema) transaction.getSchema(); + AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList(EthereumPlugin.HIDDEN_PREFIX + tableName) ); + // In Polypheny, algebra operations are used to represent various SQL operations such as scans, projections, filters, etc. // Unlike typical handling of DML (Data Manipulation Language) operations, Polypheny can include these operations as well. - // An insert operation (normally not in alg operations), for example, may be represented as a tree structure with a values algebra operation at the bottom (containing one or more rows), + // An insert operation (normally not in alg operations, but in dml), for example, may be represented as a tree structure with a values algebra operation at the bottom (containing one or more rows), // followed by projections if needed, and finally topped with a table modifier operation to signify the insert. // This internal representation of modifiers for DML operations allows for a cohesive handling of queries within Polypheny. @@ -193,7 +198,7 @@ void writeToStore( Event event, String tableName ) { // we use a project with dynamic parameters, so we can re-use it builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ) ); - builder.insert( (AlgOptTable) table.getTable() ); // modifier + builder.insert( (AlgOptTable) table ); // modifier // todo we should re-use this for all batches (ignore right now); David will do this // In the current code, values are always newly built. Ideally, we would use dynamic/prepared parameters that can be cached and reused. 
// In Polypheny, we use a special structure: values (with only rowType) -> projection with dynamic parameters (see RexDynamicParam - map every rowType to a dynamic parameter)
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
index 7dd4a4f239..2c00eb4f27 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java
@@ -228,11 +228,8 @@ protected void createColumnDefinition( CatalogColumn catalogColumn, StringBuilde
         } else if ( catalogColumn.collectionsType == PolyType.MAP ) {
             builder.append( getTypeString( PolyType.ARRAY ) );
         } else {
-            String typeString = getTypeString( catalogColumn.type );
             builder.append( " " ).append( getTypeString( catalogColumn.type ) );
-            if ( catalogColumn.length != null &&
-                    !typeString.equalsIgnoreCase("BIGINT") &&
-                    !typeString.equalsIgnoreCase("BOOLEAN")) {
+            if ( catalogColumn.length != null ) {
                 builder.append( "(" ).append( catalogColumn.length );
                 if ( catalogColumn.scale != null ) {
                     builder.append( "," ).append( catalogColumn.scale );

From 5ba4ab7cb7a65a06cad9f6b2bafbd7c8d3390f95 Mon Sep 17 00:00:00 2001
From: Tunc Polat 
Date: Mon, 7 Aug 2023 13:03:42 +0200
Subject: [PATCH 07/22] Fill dynamic parameters with correct information

---
 .../db/adapter/ethereum/EthereumPlugin.java   |  3 +-
 .../db/adapter/ethereum/EventCache.java       | 85 ++++++++++++++++++-
 .../adapter/ethereum/EventCacheManager.java   | 41 +++++++--
 3 files changed, 119 insertions(+), 10 deletions(-)

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index 1a61e0eeb4..b57d34e84b 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -211,7 +211,8 @@ public Map> getExportedColumns() {
             String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
             PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
             String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" };
-            PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };
+            PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };
+            // PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };

             // Event Data Dynamic Scheme
             List eventList = getEventsFromABI( etherscanApiKey, smartContractAddress );
diff --git
a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
index 17740b0346..6adea4bf5f 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
@@ -28,13 +28,18 @@
 import org.apache.commons.lang3.NotImplementedException;
 import org.polypheny.db.adapter.DataSource.ExportedColumn;
 import org.polypheny.db.ddl.DdlManager.FieldInformation;
+import org.polypheny.db.type.PolyType;
 import org.web3j.abi.EventEncoder;
+import org.web3j.abi.FunctionReturnDecoder;
+import org.web3j.abi.TypeReference;
 import org.web3j.abi.datatypes.Event;
+import org.web3j.abi.datatypes.Type;
 import org.web3j.protocol.Web3j;
 import org.web3j.protocol.core.DefaultBlockParameter;
 import org.web3j.protocol.core.methods.request.EthFilter;
 import org.web3j.protocol.core.methods.response.EthLog;
 import org.web3j.protocol.core.methods.response.EthLog.LogResult;
+import org.web3j.protocol.core.methods.response.Log;
 import org.web3j.protocol.http.HttpService;

// TODO possibly: This could be split up further, so that each event has its own cache, i.e. each event does its own caching (has its own URL)

@@ -44,7 +49,7 @@ public class EventCache {

     private final int batchSizeInBlocks;
-    private final Map> cache = new ConcurrentHashMap<>(); // a cache for each event
+    private final Map>> cache = new ConcurrentHashMap<>(); // a cache for each event
     private final List events; // maintain a list of events
     private final String smartContractAddress;
     private final BigInteger fromBlock;
@@ -121,8 +126,9 @@ public void startCaching() {
                 if (cache.get( event ).size() == 0) {
                     continue;
                 }
+
                 String tableName = event.getName().toLowerCase();
-                EventCacheManager.getInstance().writeToStore( event, tableName ); // write the event into the store // todo add table name // (T): Question -> e.g "delegateChanged"?
+                EventCacheManager.getInstance().writeToStore( tableName, cache.get( event ) ); // write the event into the store // todo add table name // (T): Question -> e.g "delegateChanged"?
cache.get( event ).clear(); // clear cache batch } @@ -141,10 +147,52 @@ public synchronized void addToCache( Event event, BigInteger startBlock, BigInte filter.addSingleTopic( EventEncoder.encode( event ) ); try { - List logs = web3j.ethGetLogs( filter ).send().getLogs(); // I think I don't need this: .stream().map( log -> (LogResult) log ).collect( Collectors.toList() ); + List rawLogs = web3j.ethGetLogs( filter ).send().getLogs(); // I think I don't need this: .stream().map( log -> (LogResult) log ).collect( Collectors.toList() ); + + List> structuredLogs = new ArrayList<>(); + + for (EthLog.LogResult rawLogResult : rawLogs) { + Log rawLog = (Log) rawLogResult.get(); + List structuredLog = new ArrayList<>(); + + // Add all indexed values first + for (int i = 0; i < event.getParameters().size(); i++) { + TypeReference paramType = event.getParameters().get(i); + if (paramType.isIndexed()) { + structuredLog.add(extractIndexedValue(rawLog, paramType, i)); + } + } + + // Then add all non-indexed values + int nonIndexedPosition = 0; // Separate index for non-indexed parameters + for (int i = 0; i < event.getParameters().size(); i++) { + TypeReference paramType = event.getParameters().get(i); + if (!paramType.isIndexed()) { + structuredLog.add(extractNonIndexedValue(rawLog, paramType, nonIndexedPosition, event)); + nonIndexedPosition++; + } + } + + // Add other log information as needed + structuredLog.add(rawLog.isRemoved()); + structuredLog.add(rawLog.getLogIndex()); + structuredLog.add(rawLog.getTransactionIndex()); + structuredLog.add(rawLog.getTransactionHash()); + structuredLog.add(rawLog.getBlockHash()); + structuredLog.add(rawLog.getBlockNumber()); + structuredLog.add(rawLog.getAddress()); + + // Add other log information as needed + + structuredLogs.add(structuredLog); + } + + // If cache is a Map>>, you can store structuredLogs as follows + cache.put(event, structuredLogs); + // We are still writing to memory with logs & .addAll. Right now we will use the memory space. - cache.get( event ).addAll( logs ); + //cache.get( event ).addAll( rawLogs ); // Without using the memory: // Directly write to store. How? 
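(Editor's aside on the hunk above: it flattens each raw log into one row, indexed parameters first, then non-indexed parameters, then the log metadata. A condensed, self-contained sketch of the same decoding idea, using only the web3j calls already imported by this patch, follows; note that only indexed parameters occupy topics, so the topic index must count indexed parameters only, topic 0 being the event signature hash.)

    import java.util.ArrayList;
    import java.util.List;
    import org.web3j.abi.FunctionReturnDecoder;
    import org.web3j.abi.TypeReference;
    import org.web3j.abi.datatypes.Event;
    import org.web3j.abi.datatypes.Type;
    import org.web3j.protocol.core.methods.response.Log;

    class LogRowSketch {

        // Indexed values decoded from topics 1..n, then all non-indexed values decoded
        // together from the data field, mirroring the column order of addToCache above.
        static List<Object> toRow( Event event, Log log ) {
            List<Object> row = new ArrayList<>();
            int topic = 1; // topic 0 is the event signature hash
            for ( TypeReference<Type> ref : event.getParameters() ) {
                if ( ref.isIndexed() ) {
                    row.add( FunctionReturnDecoder.decodeIndexedValue( log.getTopics().get( topic++ ), ref ) );
                }
            }
            row.addAll( FunctionReturnDecoder.decode( log.getData(), event.getNonIndexedParameters() ) );
            return row;
        }

    }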
@@ -162,6 +210,35 @@ public synchronized void addToCache( Event event, BigInteger startBlock, BigInte } + private Object extractIndexedValue(Log rawLog, TypeReference paramType, int position) { + // Get the indexed parameter from the log based on its position + String topics = rawLog.getTopics().get(position + 1); // The first topic is usually the event signature + return FunctionReturnDecoder.decodeIndexedValue(topics, paramType); + } + + private Object extractNonIndexedValue(Log rawLog, TypeReference paramType, int position, Event event) { + List decodedValue = FunctionReturnDecoder.decode(rawLog.getData(), event.getNonIndexedParameters()); + return decodedValue.get(position); + } + + static PolyType convertToPolyType( String ethereumType ) { + if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { + // Ethereum's uint and int types map to BIGINT in PolyType + return PolyType.BIGINT; + } else if ( ethereumType.startsWith( "bytes" ) || ethereumType.equals( "string" ) || ethereumType.equals( "address" ) ) { + // Ethereum's bytes, string and address types map to VARCHAR in PolyType + return PolyType.VARCHAR; + } else if ( ethereumType.equals( "bool" ) ) { + // Ethereum's bool type maps to BOOLEAN in PolyType + return PolyType.BOOLEAN; + } else { + // If the type is unknown, use VARCHAR as a general type + return PolyType.VARCHAR; + } + } + + + public CachingStatus getStatus() { throw new NotImplementedException(); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 7091d8bbfd..a9a65e3c2a 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -17,6 +17,7 @@ package org.polypheny.db.adapter.ethereum; import java.math.BigInteger; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -58,7 +59,15 @@ import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.type.PolyType; +import org.web3j.abi.datatypes.Address; import org.web3j.abi.datatypes.Event; +import org.web3j.abi.datatypes.generated.Uint256; +import org.web3j.protocol.core.methods.response.EthLog; +import org.web3j.protocol.core.methods.response.Log; +import org.web3j.tx.Contract.EventValuesWithLog; +import org.web3j.abi.EventEncoder; + @Slf4j public class EventCacheManager { @@ -168,7 +177,7 @@ private Transaction getTransaction() { * @param event The event to be written to the store. * @param tableName The name of the table where the event should be stored. */ - void writeToStore( Event event, String tableName ) { + void writeToStore( String tableName, List> logResults ) { // Create a fresh transaction. A transaction can consist of multiple statements, // each representing a single SQL command to be executed as part of the transaction. 
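        // Editor's note: getTransaction() opens a fresh transaction on every writeToStore call,
        // so each cached batch of rows is written under its own transaction rather than one
        // long-running transaction shared across batches.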
Transaction transaction = getTransaction(); @@ -180,9 +189,8 @@ void writeToStore( Event event, String tableName ) { AlgBuilder builder = AlgBuilder.create( statement ); // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); - AlgOptSchema algOptSchema = (AlgOptSchema) transaction.getSchema(); - AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList(EthereumPlugin.HIDDEN_PREFIX + tableName) ); - + AlgOptSchema algOptSchema = transaction.getCatalogReader(); + AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + tableName ) ); // In Polypheny, algebra operations are used to represent various SQL operations such as scans, projections, filters, etc. // Unlike typical handling of DML (Data Manipulation Language) operations, Polypheny can include these operations as well. @@ -213,7 +221,30 @@ void writeToStore( Event event, String tableName ) { // TODO: Correctly fill in the dynamic parameters with the correct information from the event (event.getIndexedParameters().get( i++ ).toString()) int i = 0; for ( AlgDataTypeField field : rowType.getFieldList() ) { - statement.getDataContext().addParameterValues( field.getIndex(), field.getType(), List.of( event.getIndexedParameters().get( i++ ).toString() ) ); // take the correct indexedParameters - at the moment we only add one row at a time, could refactor to add the whole batch + long idx = field.getIndex(); + AlgDataType type = field.getType(); + + // Extracting the values for the current field from the log results + List fieldValues = new ArrayList<>(); + for ( List logResult : logResults ) { + Object value = logResult.get( i ); + Object processedValue; + // temporarily + if ( value instanceof Address ) { + processedValue = ((Address) value).toString(); + } else if ( value instanceof Uint256 ) { + processedValue = ((Uint256) value).getValue(); + } else if (value instanceof BigInteger) { + processedValue = value; // Already a BigInteger + } else if (value instanceof Boolean) { + processedValue = value; // No need to convert boolean + } else { + processedValue = value.toString(); // handle other types as needed + } + fieldValues.add( processedValue ); + } + i++; + statement.getDataContext().addParameterValues( idx, type, fieldValues ); // take the correct indexedParameters - at the moment we only add one row at a time, could refactor to add the whole batch } // execute the transaction (query will be executed) From dcad654bf612f0bb60097d8710a61ce00da288b4 Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Fri, 11 Aug 2023 12:22:22 +0200 Subject: [PATCH 08/22] Refactor getExportedColumn and fix cache type converting issues --- .../db/adapter/ethereum/EthereumPlugin.java | 333 ++++++++---------- .../db/adapter/ethereum/EthereumStarter.java | 2 + .../db/adapter/ethereum/EventCache.java | 64 ++-- .../adapter/ethereum/EventCacheManager.java | 63 +--- .../db/adapter/ethereum/EventData.java | 55 ++- .../db/adapter/ethereum/EventDataReader.java | 4 +- 6 files changed, 225 insertions(+), 296 deletions(-) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index b57d34e84b..02a7518b22 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -29,6 
+29,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 import org.json.JSONArray;
@@ -61,6 +62,7 @@
 import org.web3j.abi.TypeReference;
 import org.web3j.abi.datatypes.Address;
 import org.web3j.abi.datatypes.Event;
+import org.web3j.abi.datatypes.Type;
 import org.web3j.abi.datatypes.generated.Uint256;
 import org.web3j.protocol.Web3j;
 import org.web3j.protocol.http.HttpService;
@@ -116,7 +118,7 @@ public void stop() {
     @AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable = true) // Event Data: Add annotation
     @AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true)
     @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true)
-    @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true)
+    @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = false, position = 8, modifiable = true)
     @AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 9, modifiable = true)
     public static class EthereumDataSource extends DataSource {
@@ -134,12 +136,10 @@ public static class EthereumDataSource extends DataSource {
         private final BigInteger fromBlock;
         @Getter
         private final BigInteger toBlock;
-        private final Map eventInputsMap;
+        private final Map eventDataMap;
         private Boolean caching;
         private String cachingAdapterTargetName;
-        @Getter
-        List events = new ArrayList<>(); // for caching

         private Map> map;
@@ -152,7 +152,7 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map
             this.etherscanApiKey = settings.get( "EtherscanApiKey" );
             this.fromBlock = new BigInteger( settings.get( "fromBlock" ) );
             this.toBlock = new BigInteger( settings.get( "toBlock" ) );
-            this.eventInputsMap = new HashMap<>();
+            this.eventDataMap = new HashMap<>();
             this.caching = Boolean.parseBoolean( settings.get( "Caching" ) );
             this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" );
             createInformationPage();
@@ -199,6 +199,7 @@ public void truncate( Context context, CatalogTable table ) {

         // Because the EthereumAdapter is a source, Polypheny will always call this method to ask what the structure of this adapter is.
@Override public Map> getExportedColumns() { + log.warn( "getExportedColumn" ); // Ensure that this block of code is called only once by checking if 'map' is null before proceeding if ( map != null ) { return map; @@ -208,156 +209,25 @@ public Map> getExportedColumns() { String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" }; PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP }; + createExportedColumns( "block", map, blockColumns, blockTypes ); + String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" }; PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR }; + createExportedColumns( "transaction", map, transactionColumns, transactionTypes ); + String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" }; PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR }; - // PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR }; - - // Event Data Dynamic Scheme - List eventList = getEventsFromABI( etherscanApiKey, smartContractAddress ); - eventInputsMap.clear(); // clear the map - events.clear(); // clear the map - for ( JSONObject event : eventList ) { - String eventName = event.getString( "name" ); // to match it later with catalogTable.name - JSONArray inputsArray = event.getJSONArray( "inputs" ); - List inputsList = new ArrayList<>(); - List> eventParameters = new ArrayList<>(); - for ( int i = 0; i < inputsArray.length(); i++ ) { - JSONObject inputObject = inputsArray.getJSONObject( i ); - inputsList.add( inputObject ); - // put this into a method (modular) - String type = inputObject.getString( "type" ); - boolean indexed = inputObject.getBoolean( "indexed" ); - if ( type.equals( "address" ) ) { - eventParameters.add( indexed ? new TypeReference
( true ) { - } : new TypeReference
( false ) { - } ); - } else if ( type.equals( "uint256" ) ) { - eventParameters.add( indexed ? new TypeReference( true ) { - } : new TypeReference( false ) { - } ); - } - } - eventInputsMap.put( eventName.toLowerCase(), new EventData( eventName, inputsList ) ); - events.add( new Event( eventName, eventParameters ) ); - } - - PolyType type = PolyType.VARCHAR; - PolyType collectionsType = null; - Integer length = 300; - Integer scale = null; - Integer dimension = null; - Integer cardinality = null; - int position = 0; - List blockCols = new ArrayList<>(); - for ( String blockCol : blockColumns ) { - blockCols.add( new ExportedColumn( - blockCol, - blockTypes[position], - collectionsType, - length, - scale, - dimension, - cardinality, - false, - SCHEMA_NAME, - "block", - blockCol, - position, - position == 0 ) ); - position++; - - } - map.put( "block", blockCols ); - - List transactCols = new ArrayList<>(); - position = 0; - for ( String transactCol : transactionColumns ) { - transactCols.add( new ExportedColumn( - transactCol, - transactionTypes[position], - collectionsType, - length, - scale, - dimension, - cardinality, - false, - SCHEMA_NAME, - "transaction", - transactCol, - position, - position == 0 ) ); - position++; - } - map.put( "transaction", transactCols ); - - // Event Data: Creating columns for each event for specified smart contract based on ABI - for ( Map.Entry eventEntry : eventInputsMap.entrySet() ) { - String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name - List inputsList = eventEntry.getValue().getData(); // Get the data - List eventDataCols = new ArrayList<>(); - int inputPosition = 0; - - for ( JSONObject input : inputsList ) { - String inputName = input.getString( "name" ); - PolyType inputType = convertToPolyType( input.getString( "type" ) ); // convert event types to polytype - eventDataCols.add( new ExportedColumn( - inputName, - inputType, - collectionsType, - getLengthForType(inputType), - scale, - dimension, - cardinality, - false, - SCHEMA_NAME, - eventName, // event name - inputName, - inputPosition, - inputPosition == 0 - ) ); - inputPosition++; - } - - // Adding common columns - for ( int i = 0; i < commonEventColumns.length; i++ ) { - String columnName = commonEventColumns[i]; - PolyType columnType = commonEventTypes[i]; - eventDataCols.add( new ExportedColumn( - columnName, - columnType, - collectionsType, - getLengthForType(columnType), - scale, - dimension, - cardinality, - false, - SCHEMA_NAME, - eventName, // event name - columnName, - inputPosition, - inputPosition == 0 - ) ); - inputPosition++; - } - - map.put( eventName, eventDataCols ); - } + createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes ); if ( caching == Boolean.TRUE ) { // Disable caching to prevent multiple unnecessary attempts to cache the same data. caching = false; this.map = map; try { - // Catalog: Centralized repository that contains metadata, such as information about tables, columns, schemas, adapters (source & stores), interfaces and other database objects (central meta unit; db structure) - // Acts as a reference for the system to understand the structure and organization of the data and how to interact with various components - // Databases like PostgreSQL have a schema that effectively defines the schema. This is often simply referred to as a catalog. 
- - // Get the default adapter for caching (currently "hsqldb"; see AdapterSettingString CachingAdapterTargetName); where we want to put our data (will be a Store, not a Source anymore) - // cachingAdapterTargetName can only be a store, where we can insert data (you can't insert into a source) + List events = eventDataMap.values().stream() + .map( EventData::getEvent ) + .collect( Collectors.toList() ); CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName ); - // Register and initialize caching the events using the specified information EventCacheManager.getInstance() .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) .initializeCaching(); @@ -370,15 +240,6 @@ public Map> getExportedColumns() { return map; } - private Integer getLengthForType(PolyType type) { - switch (type) { - case VARCHAR: - return 300; - default: - return null; - } - } - @Override public boolean prepare( PolyXid xid ) { @@ -443,7 +304,7 @@ protected void createInformationPage() { protected List getEventsFromABI( String etherscanApiKey, String contractAddress ) { - List eventList = new ArrayList<>(); + List events = new ArrayList<>(); try { URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getabi&address=" + contractAddress + "&apikey=" + etherscanApiKey ); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); @@ -461,67 +322,155 @@ protected List getEventsFromABI( String etherscanApiKey, String cont JSONObject jsonObject = new JSONObject( response.toString() ); String abi = jsonObject.getString( "result" ); - // Convert ABI string to JSON Array - JSONArray abiArray = new JSONArray( abi ); + JSONArray abiArray = new JSONArray( abi ); // Convert ABI string to JSON Array for ( int i = 0; i < abiArray.length(); i++ ) { JSONObject obj = abiArray.getJSONObject( i ); - // Check if the current object is an event if ( obj.getString( "type" ).equals( "event" ) ) { - eventList.add( obj ); + events.add( obj ); } } } } catch ( IOException e ) { + // todo: handle errors; for example no abi or internet connection etc. 
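                // Editor's note (assumption about the external service, not stated in the patch):
                // Etherscan's getabi endpoint usually answers HTTP 200 even on failure, with a JSON
                // body of the shape { "status", "message", "result" }, where "result" then carries
                // an error message instead of the ABI string parsed above.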
throw new RuntimeException( e ); } - return eventList; + return events; } - static PolyType convertToPolyType( String ethereumType ) { - if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { - // Ethereum's uint and int types map to BIGINT in PolyType - return PolyType.BIGINT; - } else if ( ethereumType.startsWith( "bytes" ) || ethereumType.equals( "string" ) || ethereumType.equals( "address" ) ) { - // Ethereum's bytes, string and address types map to VARCHAR in PolyType - return PolyType.VARCHAR; - } else if ( ethereumType.equals( "bool" ) ) { - // Ethereum's bool type maps to BOOLEAN in PolyType - return PolyType.BOOLEAN; - } else { - // If the type is unknown, use VARCHAR as a general type - return PolyType.VARCHAR; + protected Event getEventFromCatalogTable( String catalogTableName ) { + if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) { + return null; } + return eventDataMap.get( catalogTableName ).getEvent(); } - protected Event getEventFromCatalogTable( String catalogTableName ) { - if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) { - return null; + private void createExportedColumns( String physicalTableName, Map> map, String[] columns, PolyType[] types ) { + PolyType collectionsType = null; + Integer length = 300; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + int position = 0; + List cols = new ArrayList<>(); + for ( String col : columns ) { + cols.add( new ExportedColumn( + col, + types[position], + collectionsType, + length, + scale, + dimension, + cardinality, + false, + SCHEMA_NAME, + physicalTableName, + col, + position, + position == 0 ) ); + position++; + } - EventData eventData = eventInputsMap.get( catalogTableName ); - List jsonObjects = eventData.getData(); - List> parameterTypes = new ArrayList<>(); - for ( JSONObject jsonObject : jsonObjects ) { - String type = jsonObject.getString( "type" ); - boolean indexed = jsonObject.getBoolean( "indexed" ); - - if ( type.equals( "address" ) ) { - parameterTypes.add( indexed ? new TypeReference
<Address>( true ) {
-                } : new TypeReference<Address>
( false ) { - } ); - } else if ( type.equals( "uint256" ) ) { - parameterTypes.add( indexed ? new TypeReference( true ) { - } : new TypeReference( false ) { - } ); + map.put( physicalTableName, cols ); + } + + + private void createExportedColumnsForEvents( Map> map, String[] commonEventColumns, PolyType[] commonEventTypes ) { + // Event Data Dynamic Scheme + List contractEvents = getEventsFromABI( etherscanApiKey, smartContractAddress ); + + for ( JSONObject event : contractEvents ) { + String eventName = event.getString( "name" ); // to match it later with catalogTable.name + JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data) + eventDataMap.put( eventName.toLowerCase(), new EventData( eventName, abiInputs ) ); + } + + PolyType collectionsType = null; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + + // Event Data: Creating columns for each event for specified smart contract based on ABI + for ( Map.Entry eventEntry : eventDataMap.entrySet() ) { + String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name + JSONArray abiInputs = eventEntry.getValue().getAbiInputs(); // Get the data + List eventDataCols = new ArrayList<>(); + int inputPosition = 0; + + for ( int i = 0; i < abiInputs.length(); i++ ) { + JSONObject inputObject = abiInputs.getJSONObject( i ); + String col = inputObject.getString( "name" ); + PolyType type = convertToPolyType( inputObject.getString( "type" ) ); // convert event types to polytype + eventDataCols.add( new ExportedColumn( + col, + type, + collectionsType, + getLengthForType( type ), + scale, + dimension, + cardinality, + false, + SCHEMA_NAME, + eventName, // event name + col, + inputPosition, + inputPosition == 0 + ) ); + inputPosition++; } - // ... 
+ + // Adding common columns + for ( int i = 0; i < commonEventColumns.length; i++ ) { + String columnName = commonEventColumns[i]; + PolyType columnType = commonEventTypes[i]; + eventDataCols.add( new ExportedColumn( + columnName, + columnType, + collectionsType, + getLengthForType( columnType ), + scale, + dimension, + cardinality, + false, + SCHEMA_NAME, + eventName, // event name + columnName, + inputPosition, + inputPosition == 0 + ) ); + inputPosition++; + } + + map.put( eventName, eventDataCols ); + } + } + + + private Integer getLengthForType( PolyType type ) { + switch ( type ) { + case VARCHAR: + return 300; + default: + return null; + } + } + + + static PolyType convertToPolyType( String type ) { + // todo: convert all types in evm to polytype + switch ( type ) { + case "address": + return PolyType.VARCHAR; + case "uint256": + return PolyType.BIGINT; + default: + return null; } - return new Event( eventData.getOriginalKey(), parameterTypes ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java index 7045b379fb..9f11e7fd0e 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java @@ -16,6 +16,7 @@ package org.polypheny.db.adapter.ethereum; +import lombok.extern.slf4j.Slf4j; import org.polypheny.db.iface.Authenticator; import org.polypheny.db.processing.TransactionExtension; import org.polypheny.db.transaction.TransactionManager; @@ -24,6 +25,7 @@ // Polypheny will startup and then get all the plugins // But at this point there is no access to the TM // We just say here, hey this is a TransactionExtension that says: Hey this an extension that the TM needs, please call this too as soon as we have the TM +@Slf4j public class EthereumStarter implements TransactionExtension { @Override diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index 6adea4bf5f..fde8b21dbd 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -47,7 +47,6 @@ @Slf4j // library to use logging annotations public class EventCache { - private final int batchSizeInBlocks; private final Map>> cache = new ConcurrentHashMap<>(); // a cache for each event private final List events; // maintain a list of events @@ -87,9 +86,10 @@ public void initializeCaching() { // In this method, we create the appropriate schemas and tables in the catalog. (see also createTable) private void createSchema() { - columns.remove("block"); - columns.remove("transaction"); - // todo: block and trx columns are also included. Remove? + log.warn( "start to create schema" ); + columns.remove( "block" ); + columns.remove( "transaction" ); + // TODO: block and trx columns are also included. Remove? Map> columnInformations = columns.entrySet() .stream() .collect( @@ -123,7 +123,7 @@ public void startCaching() { // just another loop for debugging reasons. I will put it in the first loop later on. 
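        // Worked example of the batch windows above, using the adapter's default settings
        // (fromBlock 17669045, toBlock 17669155, batchSizeInBlocks 50): the loop fetches
        // [17669045, 17669095], then [17669096, 17669146], then the clamped remainder
        // [17669147, 17669155], because currentBlock restarts at endBlock + 1 after each pass.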
for ( Event event : events ) { - if (cache.get( event ).size() == 0) { + if ( cache.get( event ).size() == 0 ) { continue; } @@ -147,49 +147,48 @@ public synchronized void addToCache( Event event, BigInteger startBlock, BigInte filter.addSingleTopic( EventEncoder.encode( event ) ); try { - List rawLogs = web3j.ethGetLogs( filter ).send().getLogs(); // I think I don't need this: .stream().map( log -> (LogResult) log ).collect( Collectors.toList() ); + List rawLogs = web3j.ethGetLogs( filter ).send().getLogs(); List> structuredLogs = new ArrayList<>(); - for (EthLog.LogResult rawLogResult : rawLogs) { + for ( EthLog.LogResult rawLogResult : rawLogs ) { Log rawLog = (Log) rawLogResult.get(); List structuredLog = new ArrayList<>(); // Add all indexed values first - for (int i = 0; i < event.getParameters().size(); i++) { - TypeReference paramType = event.getParameters().get(i); - if (paramType.isIndexed()) { - structuredLog.add(extractIndexedValue(rawLog, paramType, i)); + for ( int i = 0; i < event.getParameters().size(); i++ ) { + TypeReference paramType = event.getParameters().get( i ); + if ( paramType.isIndexed() ) { + structuredLog.add( extractIndexedValue( rawLog, paramType, i ) ); } } // Then add all non-indexed values int nonIndexedPosition = 0; // Separate index for non-indexed parameters - for (int i = 0; i < event.getParameters().size(); i++) { - TypeReference paramType = event.getParameters().get(i); - if (!paramType.isIndexed()) { - structuredLog.add(extractNonIndexedValue(rawLog, paramType, nonIndexedPosition, event)); + for ( int i = 0; i < event.getParameters().size(); i++ ) { + TypeReference paramType = event.getParameters().get( i ); + if ( !paramType.isIndexed() ) { + structuredLog.add( extractNonIndexedValue( rawLog, paramType, nonIndexedPosition, event ) ); nonIndexedPosition++; } } // Add other log information as needed - structuredLog.add(rawLog.isRemoved()); - structuredLog.add(rawLog.getLogIndex()); - structuredLog.add(rawLog.getTransactionIndex()); - structuredLog.add(rawLog.getTransactionHash()); - structuredLog.add(rawLog.getBlockHash()); - structuredLog.add(rawLog.getBlockNumber()); - structuredLog.add(rawLog.getAddress()); + structuredLog.add( rawLog.isRemoved() ); + structuredLog.add( rawLog.getLogIndex() ); + structuredLog.add( rawLog.getTransactionIndex() ); + structuredLog.add( rawLog.getTransactionHash() ); + structuredLog.add( rawLog.getBlockHash() ); + structuredLog.add( rawLog.getBlockNumber() ); + structuredLog.add( rawLog.getAddress() ); // Add other log information as needed - structuredLogs.add(structuredLog); + structuredLogs.add( structuredLog ); } // If cache is a Map>>, you can store structuredLogs as follows - cache.put(event, structuredLogs); - + cache.put( event, structuredLogs ); // We are still writing to memory with logs & .addAll. Right now we will use the memory space. 
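            // Illustration with an assumed event (not from this patch): for the standard ERC-20
            // Transfer(address indexed from, address indexed to, uint256 value), one structured row
            // built above ends up as
            //   [ from, to, value, removed, logIndex, transactionIndex, transactionHash, blockHash, blockNumber, address ]
            // i.e. decoded indexed topics first, then decoded data words, then the raw log metadata.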
//cache.get( event ).addAll( rawLogs ); @@ -210,17 +209,19 @@ public synchronized void addToCache( Event event, BigInteger startBlock, BigInte } - private Object extractIndexedValue(Log rawLog, TypeReference paramType, int position) { + private Object extractIndexedValue( Log rawLog, TypeReference paramType, int position ) { // Get the indexed parameter from the log based on its position - String topics = rawLog.getTopics().get(position + 1); // The first topic is usually the event signature - return FunctionReturnDecoder.decodeIndexedValue(topics, paramType); + String topics = rawLog.getTopics().get( position + 1 ); // The first topic is usually the event signature + return FunctionReturnDecoder.decodeIndexedValue( topics, paramType ); } - private Object extractNonIndexedValue(Log rawLog, TypeReference paramType, int position, Event event) { - List decodedValue = FunctionReturnDecoder.decode(rawLog.getData(), event.getNonIndexedParameters()); - return decodedValue.get(position); + + private Object extractNonIndexedValue( Log rawLog, TypeReference paramType, int position, Event event ) { + List decodedValue = FunctionReturnDecoder.decode( rawLog.getData(), event.getNonIndexedParameters() ); + return decodedValue.get( position ); } + static PolyType convertToPolyType( String ethereumType ) { if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { // Ethereum's uint and int types map to BIGINT in PolyType @@ -238,7 +239,6 @@ static PolyType convertToPolyType( String ethereumType ) { } - public CachingStatus getStatus() { throw new NotImplementedException(); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index a9a65e3c2a..9c218eb64d 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -53,20 +53,14 @@ import org.polypheny.db.plan.AlgOptSchema; import org.polypheny.db.plan.AlgOptTable; import org.polypheny.db.rex.RexDynamicParam; -import org.polypheny.db.schema.PolyphenyDbSchema.TableEntry; import org.polypheny.db.tools.AlgBuilder; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.transaction.TransactionManager; -import org.polypheny.db.type.PolyType; import org.web3j.abi.datatypes.Address; import org.web3j.abi.datatypes.Event; import org.web3j.abi.datatypes.generated.Uint256; -import org.web3j.protocol.core.methods.response.EthLog; -import org.web3j.protocol.core.methods.response.Log; -import org.web3j.tx.Contract.EventValuesWithLog; -import org.web3j.abi.EventEncoder; @Slf4j @@ -97,7 +91,6 @@ public static synchronized EventCacheManager getAndSet( TransactionManager manag } - // Returns the singleton instance; we only want to create exactly one EventCacheManager; Singleton pattern is a Java pattern; EventCacheManager can be called everywhere public static EventCacheManager getInstance() { if ( INSTANCE == null ) { throw new RuntimeException( String.format( "The %s was not correctly initialized.", EventCacheManager.class.getSimpleName() ) ); @@ -106,15 +99,11 @@ public static EventCacheManager getInstance() { } - // Transaction Manager: Process of ensuring that database transactions are processed reliably, mainly focusing on 
the ACID properties (T) private EventCacheManager( TransactionManager transactionManager ) { this.transactionManager = transactionManager; } - // sourceAdapterId == ethereum adapter (source) - // targetAdapterId == e.g. hsqldb adapter (store) - // construct and register a new EventCache for a specific source adapter; in this case the ethereum adapter public EventCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List events, Map> map ) { EventCache cache = new EventCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, smartContractAddress, fromBlock, toBlock, events, map ); this.caches.put( sourceAdapterId, cache ); @@ -122,7 +111,6 @@ public EventCache register( int sourceAdapterId, int targetAdapterId, String cli } - // Retrieves the EventCache object associated with the specified adapterId. (T) @Nullable public EventCache getCache( int adapterId ) { return caches.get( adapterId ); @@ -130,6 +118,7 @@ public EventCache getCache( int adapterId ) { void createTables( int sourceAdapterId, Map> tableInformations, int targetAdapterId ) { + log.warn( "start to create tables" ); try { long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; // get the default schema Transaction transaction = getTransaction(); // get the transaction @@ -154,8 +143,6 @@ void createTables( int sourceAdapterId, Map> tabl private Transaction getTransaction() { try { - // Question: Where is thsi Catalog coming from, bzw. defaultDatabaseId etc.? - // Why does the TM need catalog metadata? Transaction transaction = transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" ); return transaction; } catch ( UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) { @@ -164,60 +151,26 @@ private Transaction getTransaction() { } - /** - * Writes the event(s) caches to a specified table within the store. - * Meta: This method constructs a relational algebra query, representing - * the logical query plan, and then translates it into the underlying - * database's native query language. This translation leverages - * an internal algebra, common among many databases, to facilitate - * the conversion process. - * - * This approach allows for more efficient reusability, making the execution significantly faster. - * - * @param event The event to be written to the store. - * @param tableName The name of the table where the event should be stored. - */ void writeToStore( String tableName, List> logResults ) { - // Create a fresh transaction. A transaction can consist of multiple statements, - // each representing a single SQL command to be executed as part of the transaction. Transaction transaction = getTransaction(); - Statement statement = transaction.createStatement(); // statement is an object used to execute SQL commands; Creating an individual SQL command within the transaction + Statement statement = transaction.createStatement(); - // Create an Algebra Builder (AlgBuilder) instance. - // This is a helper class to simplify the construction of the relational algebra tree representing the SQL query. - // It abstracts the complexity of building this tree, allowing for the creation of even complex expressions in a more manageable way. 
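        // Sketch of the plan assembled by the calls below:
        //   Values( rowType )  ->  Project( one RexDynamicParam per field )  ->  TableModify( INSERT )
        // so a single prepared plan can be re-executed with a fresh batch of parameter values.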
AlgBuilder builder = AlgBuilder.create( statement ); // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); AlgOptSchema algOptSchema = transaction.getCatalogReader(); AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + tableName ) ); - // In Polypheny, algebra operations are used to represent various SQL operations such as scans, projections, filters, etc. - // Unlike typical handling of DML (Data Manipulation Language) operations, Polypheny can include these operations as well. - // An insert operation (normally not in alg operations, but in dml), for example, may be represented as a tree structure with a values algebra operation at the bottom (containing one or more rows), - // followed by projections if needed, and finally topped with a table modifier operation to signify the insert. - // This internal representation of modifiers for DML operations allows for a cohesive handling of queries within Polypheny. - - // 'rowType' represents the structure of a row in the table, like columns. For example, one might be 'name', another 'age', etc. - // It essentially provides the signature of what the row looks like for this particular table. AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); builder.values( rowType ); - - // we use a project with dynamic parameters, so we can re-use it builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ) ); - - builder.insert( (AlgOptTable) table ); // modifier - // todo we should re-use this for all batches (ignore right now); David will do this - // In the current code, values are always newly built. Ideally, we would use dynamic/prepared parameters that can be cached and reused. - // In Polypheny, we use a special structure: values (with only rowType) -> projection with dynamic parameters (see RegDynamicParam - map every rowType to a dynamic parameter) - // This allows us to map each rowType to a dynamic parameter. We have theoretically dynamic parameters that we could re-use. We could cache them and not always create it newly. + builder.insert( (AlgOptTable) table ); + // TODO: we should re-use this for all batches (ignore right now); David will do this AlgNode node = builder.build(); // Construct the algebraic node AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // Wrap the node into an AlgRoot as required by Polypheny // Add the dynamic parameters to the context - // This part (above) could be reused, but we still need to create new statements and define what these dynamic parameters are (below) - // addParameterValues: add all the names (Alex, Joe, Jane, etc.) for index 0, all the ages (rows) for index 1 and so on... 
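        // Worked example with assumed rows (not real chain data): for two log rows ("0xabc", 10)
        // and ("0xdef", 20), the loop below binds parameters column-wise, i.e.
        //   addParameterValues( 0, VARCHAR, ["0xabc", "0xdef"] ) and addParameterValues( 1, BIGINT, [10, 20] ),
        // one call per field rather than one per row.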
// TODO: Correctly fill in the dynamic parameters with the correct information from the event (event.getIndexedParameters().get( i++ ).toString()) int i = 0; for ( AlgDataTypeField field : rowType.getFieldList() ) { @@ -234,9 +187,9 @@ void writeToStore( String tableName, List> logResults ) { processedValue = ((Address) value).toString(); } else if ( value instanceof Uint256 ) { processedValue = ((Uint256) value).getValue(); - } else if (value instanceof BigInteger) { + } else if ( value instanceof BigInteger ) { processedValue = value; // Already a BigInteger - } else if (value instanceof Boolean) { + } else if ( value instanceof Boolean ) { processedValue = value; // No need to convert boolean } else { processedValue = value.toString(); // handle other types as needed @@ -247,10 +200,12 @@ void writeToStore( String tableName, List> logResults ) { statement.getDataContext().addParameterValues( idx, type, fieldValues ); // take the correct indexedParameters - at the moment we only add one row at a time, could refactor to add the whole batch } + log.warn( "write to store before; table name: " + tableName ); // execute the transaction (query will be executed) PolyImplementation implementation = statement.getQueryProcessor().prepareQuery( root, false ); // implements the code basically + log.warn( "write to store after; table name: " + tableName ); implementation.getRows( statement, -1 ); // Executes the query, with -1 meaning to fill in the whole batch - + log.warn( "finish write to store for table: " + tableName ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java index d932330c56..131800e709 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java @@ -16,35 +16,58 @@ package org.polypheny.db.adapter.ethereum; +import static org.web3j.abi.Utils.convert; + +import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; +import lombok.Getter; +import org.json.JSONArray; import org.json.JSONObject; +import org.web3j.abi.TypeReference; +import org.web3j.abi.datatypes.Address; +import org.web3j.abi.datatypes.Event; +import org.web3j.abi.datatypes.Type; +import org.web3j.abi.datatypes.generated.Uint256; +// TODO: extend EventData with Data public class EventData { + @Getter private String originalKey; private String lowercaseKey; - private List data; + @Getter + private Event event; + @Getter + private JSONArray abiInputs; - public EventData( String originalKey, List data ) { + public EventData( String originalKey, JSONArray abiInputs ) { this.originalKey = originalKey; this.lowercaseKey = originalKey.toLowerCase(); - this.data = data; - } - - - public String getOriginalKey() { - return originalKey; + this.abiInputs = abiInputs; + List> typeReferences = createTypeReferences( abiInputs ); + this.event = new Event( originalKey, typeReferences ); // create event based on event name (original key and inputs) } - - public String getLowercaseKey() { - return lowercaseKey; - } - - - public List getData() { - return data; + private static List> createTypeReferences( JSONArray abiInputs ) { + List> typeReferences = new ArrayList<>(); + for ( int i = 0; i < abiInputs.length(); i++ ) { + JSONObject inputObject = abiInputs.getJSONObject( i ); + String type = inputObject.getString( "type" ); + boolean indexed 
= inputObject.getBoolean( "indexed" ); + if ( type.equals( "address" ) ) { + typeReferences.add( indexed ? new TypeReference
( indexed ) {
+                } : new TypeReference<Address>
( false ) {
+                } );
+                // typeReferences.add( new TypeReference<Address>
( indexed ) );
+            } else if ( type.equals( "uint256" ) ) {
+                typeReferences.add( indexed ? new TypeReference<Uint256>( true ) {
+                } : new TypeReference<Uint256>( false ) {
+                } );
+            }
+        }
+        return typeReferences;
    }

}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
index 7a6058989e..1295cf5230 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
@@ -102,8 +102,8 @@ public String[] readNext() throws IOException {
                // Add additional columns
                String[] extendedResult = Arrays.copyOf( result, result.length + 7 );
                extendedResult[result.length] = Boolean.toString( log.isRemoved() );
-        extendedResult[result.length + 1] = log.getLogIndexRaw();
-        extendedResult[result.length + 2] = log.getTransactionIndexRaw();
+        extendedResult[result.length + 1] = log.getLogIndex().toString();
+        extendedResult[result.length + 2] = log.getTransactionIndex().toString();
                extendedResult[result.length + 3] = log.getTransactionHash();
                extendedResult[result.length + 4] = log.getBlockHash();
                extendedResult[result.length + 5] = log.getBlockNumber().toString();

From e2f8dcb843a39fbf47edce463bf17f8e33350887 Mon Sep 17 00:00:00 2001
From: Tunc Polat
Date: Fri, 11 Aug 2023 15:44:29 +0200
Subject: [PATCH 09/22] Fix caching bug

---
 .../org/polypheny/db/transaction/Lock.java    |  4 ++
 .../db/adapter/ethereum/EthereumPlugin.java   | 45 ++++++++++++-------
 .../db/adapter/ethereum/EventCache.java       |  2 +-
 .../adapter/ethereum/EventCacheManager.java   | 28 +++++++++---
 4 files changed, 57 insertions(+), 22 deletions(-)

diff --git a/dbms/src/main/java/org/polypheny/db/transaction/Lock.java b/dbms/src/main/java/org/polypheny/db/transaction/Lock.java
index e542b86d70..d91b8820d7 100644
--- a/dbms/src/main/java/org/polypheny/db/transaction/Lock.java
+++ b/dbms/src/main/java/org/polypheny/db/transaction/Lock.java
@@ -21,10 +21,12 @@ import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.transaction.Transaction.AccessMode;

 // Based on code taken from https://github.com/dstibrany/LockManager
+@Slf4j
 public class Lock {

     private final Set owners = new HashSet<>();
@@ -134,6 +136,7 @@ private void acquireSLock( TransactionImpl txn ) throws InterruptedException {

     private void acquireXLock( TransactionImpl txn ) throws InterruptedException {
         lock.lock();
+        log.warn("Acquire x lock; before: " + xLockCount);
         try {
             while ( isXLocked() || isSLocked() ) {
                 waitForGraph.add( txn, owners );
@@ -144,6 +147,7 @@ private void acquireXLock( TransactionImpl txn ) throws InterruptedException {
             owners.add( txn );
         } finally {
             lock.unlock();
+            log.warn("Acquire x lock; after: " + xLockCount);
         }
     }

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index 02a7518b22..5c770ec1dc 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -118,7 +118,7 @@ public void stop() {
 @AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue
= "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable = true) // Event Data: Add annotation @AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true) @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true) - @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = false, position = 8, modifiable = true) + @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true) @AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 9, modifiable = true) public static class EthereumDataSource extends DataSource { @@ -155,8 +155,13 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map this.eventDataMap = new HashMap<>(); this.caching = Boolean.parseBoolean( settings.get( "Caching" ) ); this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" ); - createInformationPage(); - enableInformationPage(); + new Thread( () -> { + createInformationPage(); + enableInformationPage(); + } ).start(); + + //createInformationPage(); + //enableInformationPage(); } @@ -223,18 +228,28 @@ public Map> getExportedColumns() { // Disable caching to prevent multiple unnecessary attempts to cache the same data. caching = false; this.map = map; - try { - List events = eventDataMap.values().stream() - .map( EventData::getEvent ) - .collect( Collectors.toList() ); - CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName ); - EventCacheManager.getInstance() - .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) - .initializeCaching(); - } catch ( UnknownAdapterException e ) { - // If the specified adapter is not found, throw a RuntimeException - throw new RuntimeException( e ); - } + new Thread( () -> { + try { + Thread.sleep( 1200 ); + } catch ( InterruptedException e ) { + throw new RuntimeException( e ); + } + try { + + List events = eventDataMap.values().stream() + .map( EventData::getEvent ) + .collect( Collectors.toList() ); + CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName ); + EventCacheManager.getInstance() + .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) + .initializeCaching(); + + + } catch ( UnknownAdapterException e ) { + // If the specified adapter is not found, throw a RuntimeException + throw new RuntimeException( e ); + } + } ).start(); } return map; diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index fde8b21dbd..b4bcb69c02 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -137,7 +137,7 @@ public void startCaching() { } - public synchronized void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) { + public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) { EthFilter filter = new EthFilter( DefaultBlockParameter.valueOf( startBlock ), 
DefaultBlockParameter.valueOf( endBlock ), diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 9c218eb64d..21a356e39e 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -33,6 +33,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.logical.relational.LogicalValues; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; @@ -64,7 +65,7 @@ @Slf4j -public class EventCacheManager { +public class EventCacheManager implements Runnable { // Singleton instance of EventCacheManager (T) private static EventCacheManager INSTANCE = null; @@ -152,6 +153,9 @@ private Transaction getTransaction() { void writeToStore( String tableName, List> logResults ) { + if (logResults.isEmpty()) { + return; + } Transaction transaction = getTransaction(); Statement statement = transaction.createStatement(); @@ -162,8 +166,8 @@ void writeToStore( String tableName, List> logResults ) { AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + tableName ) ); AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); - builder.values( rowType ); - builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ) ); + builder.push( LogicalValues.createOneRow( builder.getCluster() ) ); + builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ), rowType.getFieldNames() ); builder.insert( (AlgOptTable) table ); // TODO: we should re-use this for all batches (ignore right now); David will do this @@ -171,6 +175,7 @@ void writeToStore( String tableName, List> logResults ) { AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // Wrap the node into an AlgRoot as required by Polypheny // Add the dynamic parameters to the context + // don't add if value = 0 // TODO: Correctly fill in the dynamic parameters with the correct information from the event (event.getIndexedParameters().get( i++ ).toString()) int i = 0; for ( AlgDataTypeField field : rowType.getFieldList() ) { @@ -184,11 +189,11 @@ void writeToStore( String tableName, List> logResults ) { Object processedValue; // temporarily if ( value instanceof Address ) { - processedValue = ((Address) value).toString(); + processedValue = value.toString(); } else if ( value instanceof Uint256 ) { - processedValue = ((Uint256) value).getValue(); + processedValue = ((Uint256) value).getValue() == null ? null : ((Uint256) value).getValue().longValue() ; } else if ( value instanceof BigInteger ) { - processedValue = value; // Already a BigInteger + processedValue = value == null ? 
null : ((BigInteger) value).longValue(); // Already a BigInteger } else if ( value instanceof Boolean ) { processedValue = value; // No need to convert boolean } else { @@ -206,6 +211,11 @@ void writeToStore( String tableName, List> logResults ) { log.warn( "write to store after; table name: " + tableName ); implementation.getRows( statement, -1 ); // Executes the query, with -1 meaning to fill in the whole batch log.warn( "finish write to store for table: " + tableName ); + try { + transaction.commit(); + } catch ( TransactionException e ) { + throw new RuntimeException( e ); + } } @@ -214,4 +224,10 @@ private Map getAllStreamStatus() { return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, EventCache::getStatus ) ); } + + @Override + public void run() { + + } + } \ No newline at end of file From 7028d1bcaa7a1e339b5f7be2d7edca17830e2a5d Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Mon, 14 Aug 2023 00:33:23 +0200 Subject: [PATCH 10/22] Get current status of caching --- .../org/polypheny/db/transaction/Lock.java | 4 -- .../db/adapter/ethereum/EthereumPlugin.java | 6 +-- .../db/adapter/ethereum/EventCache.java | 46 +++++++++---------- .../adapter/ethereum/EventCacheManager.java | 21 +++------ 4 files changed, 32 insertions(+), 45 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/transaction/Lock.java b/dbms/src/main/java/org/polypheny/db/transaction/Lock.java index d91b8820d7..e542b86d70 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/Lock.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/Lock.java @@ -21,12 +21,10 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; import org.polypheny.db.transaction.Transaction.AccessMode; // Based on code taken from https://github.com/dstibrany/LockManager -@Slf4j public class Lock { private final Set owners = new HashSet<>(); @@ -136,7 +134,6 @@ private void acquireSLock( TransactionImpl txn ) throws InterruptedException { private void acquireXLock( TransactionImpl txn ) throws InterruptedException { lock.lock(); - log.warn("Acquire x lock; before: " + xLockCount); try { while ( isXLocked() || isSLocked() ) { waitForGraph.add( txn, owners ); @@ -147,7 +144,6 @@ private void acquireXLock( TransactionImpl txn ) throws InterruptedException { owners.add( txn ); } finally { lock.unlock(); - log.warn("Acquire x lock; after: " + xLockCount); } } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 5c770ec1dc..bae7eccc65 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -73,7 +73,7 @@ public class EthereumPlugin extends Plugin { public static final String ADAPTER_NAME = "ETHEREUM"; - public static final String HIDDEN_PREFIX = "__hidden__"; // evtl motzt der - db kann evtl nicht mit dollar zeichen nehmen __hidden__ + public static final String HIDDEN_PREFIX = "__hidden__"; /** @@ -228,6 +228,7 @@ public Map> getExportedColumns() { // Disable caching to prevent multiple unnecessary attempts to cache the same data. 
caching = false; this.map = map; + // todo: fix concurrency issues (dirty solution right now) new Thread( () -> { try { Thread.sleep( 1200 ); @@ -235,7 +236,6 @@ public Map> getExportedColumns() { throw new RuntimeException( e ); } try { - List events = eventDataMap.values().stream() .map( EventData::getEvent ) .collect( Collectors.toList() ); @@ -243,8 +243,6 @@ public Map> getExportedColumns() { EventCacheManager.getInstance() .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) .initializeCaching(); - - } catch ( UnknownAdapterException e ) { // If the specified adapter is not found, throw a RuntimeException throw new RuntimeException( e ); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index b4bcb69c02..83b4056f29 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -53,6 +53,7 @@ public class EventCache { private final String smartContractAddress; private final BigInteger fromBlock; private final BigInteger toBlock; + private BigInteger currentBlock; protected final Web3j web3j; public final int sourceAdapterId; @@ -67,6 +68,7 @@ public EventCache( int sourceAdapterId, int targetAdapterId, String clientUrl, i this.batchSizeInBlocks = batchSizeInBlocks; this.smartContractAddress = smartContractAddress; this.fromBlock = fromBlock; + this.currentBlock = fromBlock; this.toBlock = toBlock; this.events = events; events.forEach( event -> this.cache.put( event, new ArrayList<>() ) ); @@ -106,7 +108,7 @@ private void createSchema() { public void startCaching() { log.warn( "start to cache" ); - BigInteger currentBlock = fromBlock; + currentBlock = fromBlock; while ( currentBlock.compareTo( toBlock ) <= 0 ) { BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ); @@ -128,7 +130,7 @@ public void startCaching() { } String tableName = event.getName().toLowerCase(); - EventCacheManager.getInstance().writeToStore( tableName, cache.get( event ) ); // write the event into the store // todo add table name // (T): Question -> e.g "delegateChanged"? 
+ EventCacheManager.getInstance().writeToStore( tableName, cache.get( event ) ); // write the event into the store cache.get( event ).clear(); // clear cache batch } @@ -155,7 +157,7 @@ public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock Log rawLog = (Log) rawLogResult.get(); List structuredLog = new ArrayList<>(); - // Add all indexed values first + // Add all indexed values first (topics) for ( int i = 0; i < event.getParameters().size(); i++ ) { TypeReference paramType = event.getParameters().get( i ); if ( paramType.isIndexed() ) { @@ -163,7 +165,7 @@ public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock } } - // Then add all non-indexed values + // Then add all non-indexed values (data) int nonIndexedPosition = 0; // Separate index for non-indexed parameters for ( int i = 0; i < event.getParameters().size(); i++ ) { TypeReference paramType = event.getParameters().get( i ); @@ -182,8 +184,6 @@ public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock structuredLog.add( rawLog.getBlockNumber() ); structuredLog.add( rawLog.getAddress() ); - // Add other log information as needed - structuredLogs.add( structuredLog ); } @@ -197,7 +197,7 @@ public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock // Directly write to store. How? // 1. call getLogs method which returns logs // 2. write it directly to the store: writeToStore( getLogs() ) - // This can be done synchronously. David thinks this method is good for my project. This means we don't need the cash Hashmap anymore. + // This can be done synchronously. David thinks this method is good for my project. This means we don't need the cache Hashmap anymore. // or (again using a little bit of memory) // use also a hashmap like above, write them into the map (like right now) but this time use multithreading @@ -222,25 +222,25 @@ private Object extractNonIndexedValue( Log rawLog, TypeReference paramType, i } - static PolyType convertToPolyType( String ethereumType ) { - if ( ethereumType.startsWith( "uint" ) || ethereumType.startsWith( "int" ) ) { - // Ethereum's uint and int types map to BIGINT in PolyType - return PolyType.BIGINT; - } else if ( ethereumType.startsWith( "bytes" ) || ethereumType.equals( "string" ) || ethereumType.equals( "address" ) ) { - // Ethereum's bytes, string and address types map to VARCHAR in PolyType - return PolyType.VARCHAR; - } else if ( ethereumType.equals( "bool" ) ) { - // Ethereum's bool type maps to BOOLEAN in PolyType - return PolyType.BOOLEAN; + public CachingStatus getStatus() { + CachingStatus status = new CachingStatus(); + BigInteger totalBlocks = toBlock.subtract(fromBlock).add(BigInteger.ONE); + + if (currentBlock.add(BigInteger.valueOf(batchSizeInBlocks)).compareTo(toBlock) > 0) { + status.percent = 100; + status.state = CachingStatus.ProcessingState.DONE; } else { - // If the type is unknown, use VARCHAR as a general type - return PolyType.VARCHAR; - } - } + BigInteger processedBlocks = currentBlock.subtract(fromBlock); + status.percent = processedBlocks.floatValue() / totalBlocks.floatValue() * 100; + if (status.percent == 0) { + status.state = CachingStatus.ProcessingState.INITIALIZED; + } else { + status.state = CachingStatus.ProcessingState.PROCESSING; + } + } - public CachingStatus getStatus() { - throw new NotImplementedException(); + return status; } } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java 
b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 21a356e39e..86e8c64e64 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -153,7 +153,7 @@ private Transaction getTransaction() {

     void writeToStore( String tableName, List<List<Object>> logResults ) {
-        if (logResults.isEmpty()) {
+        if ( logResults.isEmpty() ) {
             return;
         }
         Transaction transaction = getTransaction();
@@ -175,8 +175,6 @@ void writeToStore( String tableName, List<List<Object>> logResults ) {
         AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // Wrap the node into an AlgRoot as required by Polypheny

         // Add the dynamic parameters to the context
-        // don't add if value = 0
-        // TODO: Correctly fill in the dynamic parameters with the correct information from the event (event.getIndexedParameters().get( i++ ).toString())
         int i = 0;
         for ( AlgDataTypeField field : rowType.getFieldList() ) {
@@ -186,20 +184,15 @@ void writeToStore( String tableName, List<List<Object>> logResults ) {
             List<Object> fieldValues = new ArrayList<>();
             for ( List<Object> logResult : logResults ) {
                 Object value = logResult.get( i );
-                Object processedValue;
-                // temporarily
+                // todo: values are narrowed to long (range -2^63 to 2^63-1), but a uint256 can hold up to 2^256-1, so anything above 2^63-1 overflows
+                // how should such values be converted to BIGINT? Is there a PolyType that can handle uint256? Double?
+                // (BIGINT is a 64-bit signed integer, so it has the same range as long)
                 if ( value instanceof Address ) {
-                    processedValue = value.toString();
+                    value = value.toString();
                 } else if ( value instanceof Uint256 ) {
-                    processedValue = ((Uint256) value).getValue() == null ? null : ((Uint256) value).getValue().longValue() ;
-                } else if ( value instanceof BigInteger ) {
-                    processedValue = value == null ? null : ((BigInteger) value).longValue(); // Already a BigInteger
-                } else if ( value instanceof Boolean ) {
-                    processedValue = value; // No need to convert boolean
-                } else {
-                    processedValue = value.toString(); // handle other types as needed
+                    value = ((Uint256) value).getValue() == null ?
null : ((Uint256) value).getValue().longValue(); } - fieldValues.add( processedValue ); + fieldValues.add( value ); } i++; statement.getDataContext().addParameterValues( idx, type, fieldValues ); // take the correct indexedParameters - at the moment we only add one row at a time, could refactor to add the whole batch From f57daec751bd9a24ad22d2e4c29f6085d047725b Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Tue, 15 Aug 2023 11:11:56 +0200 Subject: [PATCH 11/22] Add multiple smart contracts handling in cache --- .../db/adapter/ethereum/ContractCache.java | 142 ++++++++++++++++++ .../db/adapter/ethereum/EthereumPlugin.java | 117 +++++++++++++-- .../db/adapter/ethereum/EthereumSchema.java | 2 +- .../db/adapter/ethereum/EventCache.java | 118 ++------------- .../adapter/ethereum/EventCacheManager.java | 10 +- .../db/adapter/ethereum/EventData.java | 9 +- 6 files changed, 274 insertions(+), 124 deletions(-) create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java new file mode 100644 index 0000000000..bb312dcaf7 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java @@ -0,0 +1,142 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
+import org.polypheny.db.adapter.DataSource.ExportedColumn;
+import org.polypheny.db.ddl.DdlManager.FieldInformation;
+import org.web3j.abi.EventEncoder;
+import org.web3j.abi.FunctionReturnDecoder;
+import org.web3j.abi.TypeReference;
+import org.web3j.abi.datatypes.Event;
+import org.web3j.abi.datatypes.Type;
+import org.web3j.protocol.Web3j;
+import org.web3j.protocol.core.DefaultBlockParameter;
+import org.web3j.protocol.core.methods.request.EthFilter;
+import org.web3j.protocol.core.methods.response.EthLog;
+import org.web3j.protocol.core.methods.response.Log;
+import org.web3j.protocol.http.HttpService;
+
+@Slf4j
+public class ContractCache {
+
+    public final int sourceAdapterId;
+    private final int targetAdapterId;
+    private final Map<String, List<ExportedColumn>> columns;
+    private final int batchSizeInBlocks;
+    private final BigInteger fromBlock;
+    private final BigInteger toBlock;
+    private BigInteger currentBlock;
+
+    private final Map<String, EventCache> cache = new ConcurrentHashMap<>(); // one EventCache per smart contract address
+    private final Map<String, List<EventData>> eventsPerContract;
+    protected final Web3j web3j;
+
+
+    public ContractCache( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> columns ) {
+        this.sourceAdapterId = sourceAdapterId;
+        this.targetAdapterId = targetAdapterId;
+        this.columns = columns;
+        this.batchSizeInBlocks = batchSizeInBlocks;
+        this.fromBlock = fromBlock;
+        this.currentBlock = fromBlock;
+        this.toBlock = toBlock;
+        this.eventsPerContract = eventsPerContract;
+        this.web3j = Web3j.build( new HttpService( clientUrl ) );
+        eventsPerContract.forEach( ( address, events ) -> this.cache.put( address, new EventCache( events, web3j ) ) );
+    }
+
+
+    public void initializeCaching() {
+        // register table in schema
+        this.createSchema();
+        // start caching
+        this.startCaching();
+    }
+
+
+    private void createSchema() {
+        log.warn( "start to create schema" );
+        columns.remove( "block" );
+        columns.remove( "transaction" );
+        // TODO: block and trx columns are also included. Remove?
+        Map<String, List<FieldInformation>> columnInformations = columns.entrySet()
+                .stream()
+                .collect(
+                        Collectors.toMap(
+                                table -> EthereumPlugin.HIDDEN_PREFIX + table.getKey(), // we prepend this to hide the table to the user
+                                table -> table.getValue()
+                                        .stream()
+                                        .map( ExportedColumn::toFieldInformation )
+                                        .collect( Collectors.toList() ) ) );
+
+        EventCacheManager.getInstance().createTables( sourceAdapterId, columnInformations, targetAdapterId );
+    }
+
+
+    public void startCaching() {
+        log.warn( "start to cache" );
+        currentBlock = fromBlock;
+
+        while ( currentBlock.compareTo( toBlock ) <= 0 ) {
+            BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
+            if ( endBlock.compareTo( toBlock ) > 0 ) {
+                endBlock = toBlock;
+            }
+
+            log.warn( "from-to: " + currentBlock + " to " + endBlock ); // in production: instead of .warn take .debug
+
+            for ( Map.Entry<String, EventCache> entry : cache.entrySet() ) {
+                String address = entry.getKey();
+                EventCache eventCache = entry.getValue();
+                eventCache.addToCache( address, currentBlock, endBlock );
+            }
+
+            currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers
+        }
+    }
+
+
+    public CachingStatus getStatus() {
+        CachingStatus status = new CachingStatus();
+        BigInteger totalBlocks = toBlock.subtract( fromBlock ).add( BigInteger.ONE );
+
+        if ( currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ).compareTo( toBlock ) > 0 ) {
+            status.percent = 100;
+            status.state = CachingStatus.ProcessingState.DONE;
+        } else {
+            BigInteger processedBlocks = currentBlock.subtract( fromBlock );
+            status.percent = processedBlocks.floatValue() / totalBlocks.floatValue() * 100;
+
+            if ( status.percent == 0 ) {
+                status.state = CachingStatus.ProcessingState.INITIALIZED;
+            } else {
+                status.state = CachingStatus.ProcessingState.PROCESSING;
+            }
+        }
+
+        return status;
+    }
+
+}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index bae7eccc65..e2a878b809 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -29,6 +29,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.Callable;
 import java.util.stream.Collectors;
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
@@ -131,6 +132,8 @@ public static class EthereumDataSource extends DataSource {
     private EthereumSchema currentSchema;
     @Getter
     private final String smartContractAddress;
+    @Getter
+    final List<String> smartContractAddresses;
     private final String etherscanApiKey;
     @Getter
     private final BigInteger fromBlock;
@@ -149,6 +152,7 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map
         this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
         this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
         this.smartContractAddress = settings.get( "SmartContractAddress" ); // Event Data; Add smartContractAddress to EDataSource
+        this.smartContractAddresses = Arrays.asList( "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", "0x6b175474e89094c44da98b954eedeac495271d0f" ); // todo: get from adapter settings
         this.etherscanApiKey = settings.get( "EtherscanApiKey" );
         this.fromBlock = new BigInteger( settings.get( "fromBlock" ) );
         this.toBlock = new BigInteger( settings.get( "toBlock" ) );
@@ -236,12 +240,14 @@ public
Map> getExportedColumns() { throw new RuntimeException( e ); } try { - List events = eventDataMap.values().stream() - .map( EventData::getEvent ) - .collect( Collectors.toList() ); + Map> eventsPerContract = eventDataMap.values().stream() + .collect(Collectors.groupingBy( + EventData::getSmartContractAddress, + Collectors.toList() + )); CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName ); EventCacheManager.getInstance() - .register( getAdapterId(), cachingAdapter.id, clientURL, 50, smartContractAddress, fromBlock, toBlock, events, map ) + .register( getAdapterId(), cachingAdapter.id, clientURL, 50, fromBlock, toBlock, eventsPerContract, map ) .initializeCaching(); } catch ( UnknownAdapterException e ) { // If the specified adapter is not found, throw a RuntimeException @@ -347,6 +353,7 @@ protected List getEventsFromABI( String etherscanApiKey, String cont } catch ( IOException e ) { // todo: handle errors; for example no abi or internet connection etc. + log.warn( "GET EVENTS ERROR" ); throw new RuntimeException( e ); } @@ -362,6 +369,15 @@ protected Event getEventFromCatalogTable( String catalogTableName ) { } + protected String getSmartContractAddressFromCatalogTable( String catalogTableName ) { + if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) { + return null; + } + return eventDataMap.get( catalogTableName ).getSmartContractAddress(); + + } + + private void createExportedColumns( String physicalTableName, Map> map, String[] columns, PolyType[] types ) { PolyType collectionsType = null; Integer length = 300; @@ -393,13 +409,29 @@ private void createExportedColumns( String physicalTableName, Map> map, String[] commonEventColumns, PolyType[] commonEventTypes ) { - // Event Data Dynamic Scheme - List contractEvents = getEventsFromABI( etherscanApiKey, smartContractAddress ); + for ( String address : smartContractAddresses ) { + // todo: API Rate Limits Etherscan. If called inside for loop it can cause error + String contractName = null; + List contractEvents = null; + try { + contractName = callWithExponentialBackoff(() -> getContractName(address)); + contractEvents = callWithExponentialBackoff(() -> getEventsFromABI(etherscanApiKey, address)); + } catch ( Exception e ) { + throw new RuntimeException( e ); + } - for ( JSONObject event : contractEvents ) { - String eventName = event.getString( "name" ); // to match it later with catalogTable.name - JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data) - eventDataMap.put( eventName.toLowerCase(), new EventData( eventName, abiInputs ) ); + // String contractName = getContractName( address ); + // List contractEvents = getEventsFromABI( etherscanApiKey, address ); + + for ( JSONObject event : contractEvents ) { + if ( event.getBoolean( "anonymous" ) ) { + continue; + } + String eventName = event.getString( "name" ); // to match it later with catalogTable.name + String compositeKey = contractName + "_" + eventName; // e.g. 
Uni_Transfer & Dai_Transfer + JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data) + eventDataMap.put( compositeKey.toLowerCase(), new EventData( eventName, contractName, address, abiInputs ) ); + } } PolyType collectionsType = null; @@ -409,7 +441,8 @@ private void createExportedColumnsForEvents( Map> m // Event Data: Creating columns for each event for specified smart contract based on ABI for ( Map.Entry eventEntry : eventDataMap.entrySet() ) { - String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name + // String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name + String compositeEventName = eventEntry.getValue().getCompositeName(); JSONArray abiInputs = eventEntry.getValue().getAbiInputs(); // Get the data List eventDataCols = new ArrayList<>(); int inputPosition = 0; @@ -428,7 +461,7 @@ private void createExportedColumnsForEvents( Map> m cardinality, false, SCHEMA_NAME, - eventName, // event name + compositeEventName, // event name col, inputPosition, inputPosition == 0 @@ -450,7 +483,7 @@ private void createExportedColumnsForEvents( Map> m cardinality, false, SCHEMA_NAME, - eventName, // event name + compositeEventName, // event name columnName, inputPosition, inputPosition == 0 @@ -458,8 +491,42 @@ private void createExportedColumnsForEvents( Map> m inputPosition++; } - map.put( eventName, eventDataCols ); + map.put( compositeEventName, eventDataCols ); } + + } + + + private String getContractName( String contractAddress ) { + try { + URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getsourcecode&address=" + contractAddress + "&apikey=" + etherscanApiKey ); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod( "GET" ); + int responseCode = connection.getResponseCode(); + if ( responseCode == HttpURLConnection.HTTP_OK ) { + BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) ); + String inputLine; + StringBuilder response = new StringBuilder(); + + while ( (inputLine = in.readLine()) != null ) { + response.append( inputLine ); + } + in.close(); + + JSONObject jsonObject = new JSONObject( response.toString() ); + JSONArray resultArray = jsonObject.getJSONArray( "result" ); // Get result array + if ( resultArray.length() > 0 ) { + JSONObject contractObject = resultArray.getJSONObject( 0 ); // Get the first object in result array + return contractObject.getString( "ContractName" ); // Return ContractName field + } + + } + + } catch ( IOException e ) { + // todo: handle errors; for example no abi or internet connection etc. 
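+            // For reference, the assumed shape of a successful getsourcecode response is roughly
+            //   { "status": "1", "result": [ { "ContractName": "Uni", "SourceCode": "...", ... } ] },
+            // which is why the parsing above reads result[0].ContractName; lookups that fail may
+            // still return HTTP 200 with an empty or error "result", ending in the null return below.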
+                throw new RuntimeException( e );
+            }
+            return null;
+        }
 
@@ -483,10 +550,32 @@ static PolyType convertToPolyType( String type ) {
                 default:
                     return null;
             }
+        }
+
+
+        public <T> T callWithExponentialBackoff( Callable<T> callable) throws Exception {
+            int maxRetries = 5;
+            long waitTime = 1000; // 1 second
+            for (int retry = 0; retry < maxRetries; retry++) {
+                try {
+                    return callable.call();
+                } catch (Exception e) {
+                    if (retry == maxRetries - 1) {
+                        throw e; // If this was our last retry, rethrow the exception
+                    }
+                    try {
+                        Thread.sleep(waitTime);
+                    } catch (InterruptedException ie) {
+                        Thread.currentThread().interrupt(); // Restore the interrupted status
+                    }
+                    waitTime *= 2; // Double the delay for the next retry
+                }
+            }
+            throw new Exception("Exponential backoff failed after " + maxRetries + " attempts.");
+        }
 
     }
 }
\ No newline at end of file
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
index 1bc04c8016..ebc6136d6f 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
@@ -71,7 +71,7 @@ public Table createBlockchainTable( CatalogTable catalogTable, List<CatalogColum
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
@@ ... @@ public class EventCache {
 
-    private final int batchSizeInBlocks;
-    private final Map<Event, List<List<Object>>> cache = new ConcurrentHashMap<>(); // a cache for each event
-    private final List<Event> events; // maintain a list of events
-    private final String smartContractAddress;
-    private final BigInteger fromBlock;
-    private final BigInteger toBlock;
-    private BigInteger currentBlock;
+    private final Map<EventData, List<List<Object>>> cache = new ConcurrentHashMap<>(); // a cache for each event
+    private final List<EventData> events;
     protected final Web3j web3j;
-    public final int sourceAdapterId;
-    private final Map<String, List<ExportedColumn>> columns;
-    private final int targetAdapterId;
-
-    public EventCache( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List<Event> events, Map<String, List<ExportedColumn>> columns ) {
-        this.sourceAdapterId = sourceAdapterId;
-        this.targetAdapterId = targetAdapterId;
-        this.columns = columns;
-        this.batchSizeInBlocks = batchSizeInBlocks;
-        this.smartContractAddress = smartContractAddress;
-        this.fromBlock = fromBlock;
-        this.currentBlock = fromBlock;
-        this.toBlock = toBlock;
+    public EventCache( List<EventData> events, Web3j web3j ) {
+        this.web3j = web3j;
         this.events = events;
         events.forEach( event -> this.cache.put( event, new ArrayList<>() ) );
-        this.web3j = Web3j.build( new HttpService( clientUrl ) );
-    }
-
-    // jede event hat ein cache evtl -> evtl einfacher für logik
-
-
-    public void initializeCaching() {
-        // register table in schema
-        this.createSchema();
-        // start caching
-        this.startCaching();
-    }
-
-
-    // In this method, we create the appropriate schemas and tables in the catalog. (see also createTable)
-    private void createSchema() {
-        log.warn( "start to create schema" );
-        columns.remove( "block" );
-        columns.remove( "transaction" );
-        // TODO: block and trx columns are also included. Remove?
-        Map<String, List<FieldInformation>> columnInformations = columns.entrySet()
-                .stream()
-                .collect(
-                        Collectors.toMap(
-                                table -> EthereumPlugin.HIDDEN_PREFIX + table.getKey(), // we prepend this to hide the table to the user
-                                table -> table.getValue()
-                                        .stream()
-                                        .map( ExportedColumn::toFieldInformation )
-                                        .collect( Collectors.toList() ) ) );
-
-        EventCacheManager.getInstance().createTables( sourceAdapterId, columnInformations, targetAdapterId );
-    }
-
-
-    public void startCaching() {
-        log.warn( "start to cache" );
-        currentBlock = fromBlock;
-
-        while ( currentBlock.compareTo( toBlock ) <= 0 ) {
-            BigInteger endBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
-            if ( endBlock.compareTo( toBlock ) > 0 ) {
-                endBlock = toBlock;
-            }
-
-            log.warn( "from-to: " + currentBlock + " to " + endBlock ); // in production: instead of .warn take .debug
-
-            // for each event fetch logs from block x to block y according to batchSizeInBlocks
-            for ( Event event : events ) {
-                addToCache( event, currentBlock, endBlock );
+    public void addToCache( String address, BigInteger startBlock, BigInteger endBlock ) {
+        for ( EventData event : events ) {
+            addLogsToCache( address, event, startBlock, endBlock );
+            if ( cache.get( event ).size() == 0 ) {
+                continue;
             }
-
-            // just another loop for debugging reasons. I will put it in the first loop later on.
-            for ( Event event : events ) {
-                if ( cache.get( event ).size() == 0 ) {
-                    continue;
-                }
-
-                String tableName = event.getName().toLowerCase();
-                EventCacheManager.getInstance().writeToStore( tableName, cache.get( event ) ); // write the event into the store
-                cache.get( event ).clear(); // clear cache batch
-            }
-
-            currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers
+            EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ) ); // write the event into the store
+            cache.get( event ).clear(); // clear cache batch
         }
     }
 
 
-    public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock ) {
+    private void addLogsToCache( String address, EventData eventData, BigInteger startBlock, BigInteger endBlock ) {
         EthFilter filter = new EthFilter(
                 DefaultBlockParameter.valueOf( startBlock ),
                 DefaultBlockParameter.valueOf( endBlock ),
-                smartContractAddress
+                address
         );
+        Event event = eventData.getEvent();
         filter.addSingleTopic( EventEncoder.encode( event ) );
 
         try {
@@ -188,7 +121,7 @@ public void addToCache( Event event, BigInteger startBlock, BigInteger endBlock
             }
 
             // If cache is a Map<Event, List<List<Object>>>, you can store structuredLogs as follows
-            cache.put( event, structuredLogs );
+            cache.put( eventData, structuredLogs );
 
             // We are still writing to memory with logs & .addAll. Right now we will use the memory space.
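// A hedged sketch of the "write directly to the store" alternative discussed in the
// surrounding comments, assuming writeToStore( String, List<List<Object>> ) keeps its
// signature; structureLogs is a hypothetical helper, and the streaming variant itself
// is an assumption, not something this patch implements:
//
//     List<EthLog.LogResult> rawLogs = web3j.ethGetLogs( filter ).send().getLogs();
//     EventCacheManager.getInstance().writeToStore( tableName, structureLogs( rawLogs ) );
//     // memory stays bounded by a single batch; no per-event map is needed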
             //cache.get( event ).addAll( rawLogs );
 
@@ -222,26 +155,5 @@ private Object extractNonIndexedValue( Log rawLog, TypeReference<?> paramType, i
     }
 
 
-    public CachingStatus getStatus() {
-        CachingStatus status = new CachingStatus();
-        BigInteger totalBlocks = toBlock.subtract(fromBlock).add(BigInteger.ONE);
-
-        if (currentBlock.add(BigInteger.valueOf(batchSizeInBlocks)).compareTo(toBlock) > 0) {
-            status.percent = 100;
-            status.state = CachingStatus.ProcessingState.DONE;
-        } else {
-            BigInteger processedBlocks = currentBlock.subtract(fromBlock);
-            status.percent = processedBlocks.floatValue() / totalBlocks.floatValue() * 100;
-
-            if (status.percent == 0) {
-                status.state = CachingStatus.ProcessingState.INITIALIZED;
-            } else {
-                status.state = CachingStatus.ProcessingState.PROCESSING;
-            }
-        }
-
-        return status;
-    }
-
 }
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 86e8c64e64..9a46c861fc 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -74,7 +74,7 @@ public class EventCacheManager implements Runnable {
 
     // concurrent map, which maintains multiple caches, which correspond to the adapter which requested the caches
     // to allow multiple threads to read and modify; keys: adapterId, value: EventCache (T)
-    public Map<Integer, EventCache> caches = new ConcurrentHashMap<>();
+    public Map<Integer, ContractCache> caches = new ConcurrentHashMap<>();
 
     /**
@@ -105,15 +105,15 @@ private EventCacheManager( TransactionManager transactionManager ) {
     }
 
 
-    public EventCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, String smartContractAddress, BigInteger fromBlock, BigInteger toBlock, List<Event> events, Map<String, List<ExportedColumn>> map ) {
-        EventCache cache = new EventCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, smartContractAddress, fromBlock, toBlock, events, map );
+    public ContractCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> map ) {
+        ContractCache cache = new ContractCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, map );
         this.caches.put( sourceAdapterId, cache );
         return cache;
     }
 
 
     @Nullable
-    public EventCache getCache( int adapterId ) {
+    public ContractCache getCache( int adapterId ) {
         return caches.get( adapterId );
     }
 
@@ -214,7 +214,7 @@ void writeToStore( String tableName, List<List<Object>> logResults ) {
 
     private Map<Integer, CachingStatus> getAllStreamStatus() {
         // return status of process
-        return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, EventCache::getStatus ) );
+        return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, ContractCache::getStatus ) );
     }
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
index 131800e709..793b8332f9 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
@@ -39,17 +39,24 @@ public class EventData {
     @Getter
     private Event event;
     @Getter
+    private String smartContractAddress;
+    @Getter
+    private String compositeName;
+    @Getter
     private JSONArray abiInputs;
 
 
-    public EventData( String originalKey, JSONArray abiInputs ) {
+    public EventData( String originalKey, String contractName, String smartContractAddress, JSONArray abiInputs ) {
         this.originalKey = originalKey;
         this.lowercaseKey = originalKey.toLowerCase();
+        this.compositeName = contractName.toLowerCase() + "_" + originalKey.toLowerCase();
         this.abiInputs = abiInputs;
         List<TypeReference<?>> typeReferences = createTypeReferences( abiInputs );
         this.event = new Event( originalKey, typeReferences ); // create event based on event name (original key and inputs)
+        this.smartContractAddress = smartContractAddress;
     }
 
+
     private static List<TypeReference<?>> createTypeReferences( JSONArray abiInputs ) {
         List<TypeReference<?>> typeReferences = new ArrayList<>();
         for ( int i = 0; i < abiInputs.length(); i++ ) {

From 14c0b919e0d9b7f0c1e106687cb2902bfb772dc3 Mon Sep 17 00:00:00 2001
From: Tunc Polat
Date: Thu, 17 Aug 2023 00:38:22 +0200
Subject: [PATCH 12/22] Fix minor adjustments and refinements

---
 .../db/adapter/ethereum/ContractCache.java    |   3 -
 .../adapter/ethereum/EthereumDataSource.java  | 540 ++++++++++++++++++
 .../db/adapter/ethereum/EthereumPlugin.java   | 516 -----------------
 .../db/adapter/ethereum/EthereumSchema.java   |   1 -
 .../db/adapter/ethereum/EthereumTable.java    |   1 -
 .../db/adapter/ethereum/EventCache.java       | 102 ++--
 .../adapter/ethereum/EventCacheManager.java   |  10 +-
 .../db/adapter/ethereum/EventDataReader.java  |   2 +-
 8 files changed, 594 insertions(+), 581 deletions(-)
 create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
index bb312dcaf7..2747a52282 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
@@ -78,9 +78,6 @@ public void initializeCaching() {
 
     private void createSchema() {
         log.warn( "start to create schema" );
-        columns.remove( "block" );
-        columns.remove( "transaction" );
-        // TODO: block and trx columns are also included. Remove?
         Map<String, List<FieldInformation>> columnInformations = columns.entrySet()
                 .stream()
                 .collect(
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
new file mode 100644
index 0000000000..6dbea8b501
--- /dev/null
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -0,0 +1,540 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.ethereum;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.math.BigInteger;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.stream.Collectors;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.pf4j.Extension;
+import org.polypheny.db.adapter.Adapter.AdapterProperties;
+import org.polypheny.db.adapter.Adapter.AdapterSettingBoolean;
+import org.polypheny.db.adapter.Adapter.AdapterSettingInteger;
+import org.polypheny.db.adapter.Adapter.AdapterSettingString;
+import org.polypheny.db.adapter.DataSource;
+import org.polypheny.db.adapter.DeployMode;
+import org.polypheny.db.catalog.Catalog;
+import org.polypheny.db.catalog.entity.CatalogAdapter;
+import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
+import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
+import org.polypheny.db.catalog.entity.CatalogTable;
+import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
+import org.polypheny.db.information.InformationGroup;
+import org.polypheny.db.information.InformationTable;
+import org.polypheny.db.prepare.Context;
+import org.polypheny.db.schema.Schema;
+import org.polypheny.db.schema.SchemaPlus;
+import org.polypheny.db.schema.Table;
+import org.polypheny.db.transaction.PolyXid;
+import org.polypheny.db.type.PolyType;
+import org.web3j.abi.datatypes.Event;
+import org.web3j.protocol.Web3j;
+import org.web3j.protocol.http.HttpService;
+
+@Slf4j
+@Extension
+@AdapterProperties(
+        name = "Ethereum",
+        description = "An adapter for querying the Ethereum blockchain. It uses the ethereum JSON-RPC API. Currently, this adapter only supports read operations.",
+        usedModes = DeployMode.REMOTE)
+@AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
+@AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
+@AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
+@AdapterSettingString(name = "SmartContractAddresses", description = "Comma separated addresses of the smart contracts", defaultValue = "0x6b175474e89094c44da98b954eedeac495271d0f, 0x6b175474e89094c44da98b954eedeac495271d0f", position = 4, modifiable = true) // Event Data: Add annotation
+@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable = true) // Event Data: Add annotation
+@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true)
+@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true)
+@AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true)
+@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 9, modifiable = true) // todo DL: list
+public class EthereumDataSource extends DataSource {
+
+    public static final String SCHEMA_NAME = "public";
+    private String clientURL;
+    @Getter
+    private int blocks;
+    @Getter
+    private boolean experimentalFiltering;
+    private EthereumSchema currentSchema;
+    @Getter
+    final List<String> smartContractAddresses;
+    private final String etherscanApiKey;
+    @Getter
+    private final BigInteger fromBlock;
+    @Getter
+    private final BigInteger toBlock;
+    private final Map<String, EventData> eventDataMap;
+    private Boolean caching;
+    private String cachingAdapterTargetName;
+
+    private Map<String, List<ExportedColumn>> map;
+
+
+    // todo: take it out
+    public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) {
+        super( storeId, uniqueName, settings, true );
+        setClientURL( settings.get( "ClientUrl" ) );
+        this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
+        this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
+        String smartContractAddressesStr = settings.get( "SmartContractAddresses" );
+        List<String> smartContractAddresses = Arrays.stream( smartContractAddressesStr.split( "," ) )
+                .map( String::trim )
+                .collect( Collectors.toList() );
+        this.smartContractAddresses = smartContractAddresses; // Event Data; Add smartContractAddress to EDataSource
+        // this.smartContractAddresses = Arrays.asList( "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", "0x6b175474e89094c44da98b954eedeac495271d0f" ); // todo: get from adapter settings
+        this.etherscanApiKey = settings.get( "EtherscanApiKey" );
+        this.fromBlock = new BigInteger( settings.get( "fromBlock" ) );
+        this.toBlock = new BigInteger( settings.get( "toBlock" ) );
+        this.eventDataMap = new HashMap<>();
+        this.caching = Boolean.parseBoolean( settings.get( "Caching" ) );
+        this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" );
+        new Thread( () -> {
+            createInformationPage();
+            enableInformationPage();
+        } ).start();
+
+        //createInformationPage();
+        //enableInformationPage();
+    }
+
+
+    private void setClientURL( String clientURL ) {
+        Web3j web3j = Web3j.build( new HttpService( clientURL ) );
+        try {
+            BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber();
+        } catch ( Exception e ) {
+            throw new RuntimeException( "Unable to connect the client URL '" + clientURL + "'" );
+        }
+        web3j.shutdown();
+        this.clientURL = clientURL;
+    }
+
+
+    @Override
+    public void createNewSchema( SchemaPlus rootSchema, String name ) {
+        currentSchema = new EthereumSchema( this.clientURL );
+    }
+
+
+    @Override
+    public Table createTableSchema( CatalogTable combinedTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
+        return currentSchema.createBlockchainTable( combinedTable, columnPlacementsOnStore, this );
+    }
+
+
+    @Override
+    public Schema getCurrentSchema() {
+        return currentSchema;
+    }
+
+
+    @Override
+    public void truncate( Context context, CatalogTable table ) {
+        throw new RuntimeException( "Blockchain adapter does not support truncate" );
+    }
+
+
+    @Override
+    public Map<String, List<ExportedColumn>> getExportedColumns() {
+        log.warn( "getExportedColumn" );
+        // Ensure that this block of code is called only once by checking if 'map' is null before proceeding
+        if ( map != null ) {
+            return map;
+        }
+
+        Map<String, List<ExportedColumn>> map = new HashMap<>();
+
+        String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" };
+        PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP };
+        createExportedColumns( "block", map, blockColumns, blockTypes );
+
+        String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
+        PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
+        createExportedColumns( "transaction", map, transactionColumns, transactionTypes );
+
+        String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" };
+        PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };
+        createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes );
+
+        if ( caching == Boolean.TRUE ) {
+            // Disable caching to prevent multiple unnecessary attempts to cache the same data.
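+            // A hedged aside: the flag below is flipped before the caching thread starts, so a
+            // second getExportedColumns() call cannot register the cache twice. If concurrent
+            // calls are possible, an atomic guard (an assumption, not part of this patch) would
+            // state the same intent without a race:
+            //
+            //     private final AtomicBoolean cachingStarted = new AtomicBoolean( false );
+            //     ...
+            //     if ( !cachingStarted.compareAndSet( false, true ) ) { return map; }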
+            caching = false;
+            this.map = map;
+            Map<String, List<ExportedColumn>> columns = new HashMap<>( map ); // create new map instance for caching
+            columns.remove( "block" );
+            columns.remove( "transaction" );
+
+            // todo DL: fix concurrency issues (dirty solution right now)
+            new Thread( () -> {
+                try {
+                    Thread.sleep( 1200 );
+                } catch ( InterruptedException e ) {
+                    throw new RuntimeException( e );
+                }
+                try {
+                    Map<String, List<EventData>> eventsPerContract = eventDataMap.values().stream()
+                            .collect( Collectors.groupingBy(
+                                    EventData::getSmartContractAddress,
+                                    Collectors.toList()
+                            ) );
+                    CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName );
+                    EventCacheManager.getInstance()
+                            .register( getAdapterId(), cachingAdapter.id, clientURL, 50, fromBlock, toBlock, eventsPerContract, columns )
+                            .initializeCaching();
+                } catch ( UnknownAdapterException e ) {
+                    // If the specified adapter is not found, throw a RuntimeException
+                    throw new RuntimeException( e );
+                }
+            } ).start();
+        }
+
+        return map;
+    }
+
+
+    @Override
+    public boolean prepare( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support prepare()." );
+        return true;
+    }
+
+
+    @Override
+    public void commit( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support commit()." );
+    }
+
+
+    @Override
+    public void rollback( PolyXid xid ) {
+        log.debug( "Blockchain Store does not support rollback()." );
+    }
+
+
+    @Override
+    public void shutdown() {
+        removeInformationPage();
+    }
+
+
+    @Override
+    protected void reloadSettings( List<String> updatedSettings ) {
+        if ( updatedSettings.contains( "ClientUrl" ) ) {
+            setClientURL( settings.get( "ClientUrl" ) );
+        }
+        if ( updatedSettings.contains( "Blocks" ) ) {
+            this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
+        }
+        if ( updatedSettings.contains( "ExperimentalFiltering" ) ) {
+            this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
+        }
+    }
+
+
+    protected void createInformationPage() {
+        for ( Map.Entry<String, List<ExportedColumn>> entry : getExportedColumns().entrySet() ) {
+            InformationGroup group = new InformationGroup(
+                    informationPage,
+                    entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName );
+
+            InformationTable table = new InformationTable(
+                    group,
+                    Arrays.asList( "Position", "Column Name", "Type", "Primary" ) );
+            for ( ExportedColumn exportedColumn : entry.getValue() ) {
+                table.addRow(
+                        exportedColumn.physicalPosition,
+                        exportedColumn.name,
+                        exportedColumn.getDisplayType(),
+                        exportedColumn.primary ? "✔" : ""
+                );
+            }
+            informationElements.add( table );
+            informationGroups.add( group );
+        }
+    }
+
+
+    private void createExportedColumns( String physicalTableName, Map<String, List<ExportedColumn>> map, String[] columns, PolyType[] types ) {
+        PolyType collectionsType = null;
+        Integer length = 300;
+        Integer scale = null;
+        Integer dimension = null;
+        Integer cardinality = null;
+        int position = 0;
+        List<ExportedColumn> cols = new ArrayList<>();
+        for ( String col : columns ) {
+            cols.add( new ExportedColumn(
+                    col,
+                    types[position],
+                    collectionsType,
+                    length,
+                    scale,
+                    dimension,
+                    cardinality,
+                    false,
+                    SCHEMA_NAME,
+                    physicalTableName,
+                    col,
+                    position,
+                    position == 0 ) );
+            position++;
+
+        }
+        map.put( physicalTableName, cols );
+    }
+
+
+    private void createExportedColumnsForEvents( Map<String, List<ExportedColumn>> map, String[] commonEventColumns, PolyType[] commonEventTypes ) {
+        for ( String address : smartContractAddresses ) {
+            // todo: API Rate Limits Etherscan. If called inside for loop it can cause error
+            String contractName = null;
+            List<JSONObject> contractEvents = null;
+            try {
+                contractName = callWithExponentialBackoff( () -> getContractName( address ) );
+                contractEvents = callWithExponentialBackoff( () -> getEventsFromABI( etherscanApiKey, address ) );
+            } catch ( Exception e ) {
+                throw new RuntimeException( e );
+            }
+
+            // String contractName = getContractName( address );
+            // List<JSONObject> contractEvents = getEventsFromABI( etherscanApiKey, address );
+
+            for ( JSONObject event : contractEvents ) {
+                if ( event.getBoolean( "anonymous" ) ) {
+                    continue;
+                }
+                String eventName = event.getString( "name" ); // to match it later with catalogTable.name
+                String compositeKey = contractName + "_" + eventName; // e.g. Uni_Transfer & Dai_Transfer
+                JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data)
+                eventDataMap.put( compositeKey.toLowerCase(), new EventData( eventName, contractName, address, abiInputs ) );
+            }
+        }
+
+        PolyType collectionsType = null;
+        Integer scale = null;
+        Integer dimension = null;
+        Integer cardinality = null;
+
+        // Event Data: Creating columns for each event for specified smart contract based on ABI
+        for ( Map.Entry<String, EventData> eventEntry : eventDataMap.entrySet() ) {
+            // String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name
+            String compositeEventName = eventEntry.getValue().getCompositeName();
+            JSONArray abiInputs = eventEntry.getValue().getAbiInputs(); // Get the data
+            List<ExportedColumn> eventDataCols = new ArrayList<>();
+            int inputPosition = 0;
+
+            for ( int i = 0; i < abiInputs.length(); i++ ) {
+                JSONObject inputObject = abiInputs.getJSONObject( i );
+                String col = inputObject.getString( "name" );
+                PolyType type = convertToPolyType( inputObject.getString( "type" ) ); // convert event types to polytype
+                eventDataCols.add( new ExportedColumn(
+                        col,
+                        type,
+                        collectionsType,
+                        getLengthForType( type ),
+                        scale,
+                        dimension,
+                        cardinality,
+                        false,
+                        SCHEMA_NAME,
+                        compositeEventName, // event name
+                        col,
+                        inputPosition,
+                        inputPosition == 0
+                ) );
+                inputPosition++;
+            }
+
+            // Adding common columns
+            for ( int i = 0; i < commonEventColumns.length; i++ ) {
+                String columnName = commonEventColumns[i];
+                PolyType columnType = commonEventTypes[i];
+                eventDataCols.add( new ExportedColumn(
+                        columnName,
+                        columnType,
+                        collectionsType,
+                        getLengthForType( columnType ),
+                        scale,
+                        dimension,
+                        cardinality,
+                        false,
+                        SCHEMA_NAME,
+                        compositeEventName, // event name
+                        columnName,
+                        inputPosition,
+                        inputPosition == 0
+                ) );
+                inputPosition++;
+            }
+
+            map.put( compositeEventName, eventDataCols );
+        }
+
+    }
+
+
+    protected List<JSONObject> getEventsFromABI( String etherscanApiKey, String contractAddress ) {
+        List<JSONObject> events = new ArrayList<>();
+        try {
+            URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getabi&address=" + contractAddress + "&apikey=" + etherscanApiKey );
+            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+            connection.setRequestMethod( "GET" );
+            int responseCode = connection.getResponseCode();
+            if ( responseCode == HttpURLConnection.HTTP_OK ) {
+                BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
+                String inputLine;
+                StringBuilder response = new StringBuilder();
+
+                while ( (inputLine = in.readLine()) != null ) {
+                    response.append( inputLine );
+                }
+                in.close();
+
+                JSONObject jsonObject = new JSONObject( response.toString() );
+                String abi = jsonObject.getString( "result" );
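+                // For orientation (abbreviated, field values illustrative): Etherscan's getabi
+                // endpoint typically wraps the ABI as a single escaped JSON string in "result",
+                // which is why it still has to be parsed into a JSONArray in the next step:
+                //
+                //     { "status": "1", "message": "OK",
+                //       "result": "[{\"type\":\"event\",\"name\":\"Transfer\",\"anonymous\":false,\"inputs\":[...]}]" }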
+                JSONArray abiArray = new JSONArray( abi ); // Convert ABI string to JSON Array
+                for ( int i = 0; i < abiArray.length(); i++ ) {
+                    JSONObject obj = abiArray.getJSONObject( i );
+                    // Check if the current object is an event
+                    if ( obj.getString( "type" ).equals( "event" ) ) {
+                        events.add( obj );
+                    }
+                }
+            }
+
+        } catch ( IOException e ) {
+            // todo: handle errors; for example no abi or internet connection etc.
+            log.warn( "Failed to fetch events from the contract ABI" );
+            throw new RuntimeException( e );
+        }
+
+        return events;
+    }
+
+
+    private String getContractName( String contractAddress ) {
+        try {
+            URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getsourcecode&address=" + contractAddress + "&apikey=" + etherscanApiKey );
+            HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+            connection.setRequestMethod( "GET" );
+            int responseCode = connection.getResponseCode();
+            if ( responseCode == HttpURLConnection.HTTP_OK ) {
+                BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
+                String inputLine;
+                StringBuilder response = new StringBuilder();
+
+                while ( (inputLine = in.readLine()) != null ) {
+                    response.append( inputLine );
+                }
+                in.close();
+
+                JSONObject jsonObject = new JSONObject( response.toString() );
+                JSONArray resultArray = jsonObject.getJSONArray( "result" ); // Get result array
+                if ( resultArray.length() > 0 ) {
+                    JSONObject contractObject = resultArray.getJSONObject( 0 ); // Get the first object in result array
+                    return contractObject.getString( "ContractName" ); // Return ContractName field
+                }
+
+            }
+
+        } catch ( IOException e ) {
+            // todo: handle errors; for example no abi or internet connection etc.
+            throw new RuntimeException( e );
+        }
+        return null;
+    }
+
+
+    protected Event getEventFromCatalogTable( String catalogTableName ) {
+        if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
+            return null;
+        }
+        return eventDataMap.get( catalogTableName ).getEvent();
+    }
+
+
+    protected String getSmartContractAddressFromCatalogTable( String catalogTableName ) {
+        if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
+            return null;
+        }
+        return eventDataMap.get( catalogTableName ).getSmartContractAddress();
+
+    }
+
+
+    private Integer getLengthForType( PolyType type ) {
+        switch ( type ) {
+            case VARCHAR:
+                return 300;
+            default:
+                return null;
+        }
+    }
+
+
+    static PolyType convertToPolyType( String type ) {
+        // todo: convert all types in evm to polytype
+        switch ( type ) {
+            case "bool":
+                return PolyType.BOOLEAN;
+            case "address":
+                return PolyType.VARCHAR;
+            case "int": // 8 to 256...
+            case "uint256":
+                return PolyType.BIGINT;
+            default:
+                return null;
+        }
+    }
+
+
+    public <T> T callWithExponentialBackoff( Callable<T> callable ) throws Exception {
+        int maxRetries = 5;
+        long waitTime = 1000; // 1 second
+
+        for ( int retry = 0; retry < maxRetries; retry++ ) {
+            try {
+                return callable.call();
+            } catch ( Exception e ) {
+                if ( retry == maxRetries - 1 ) {
+                    throw e; // If this was our last retry, rethrow the exception
+                }
+                try {
+                    Thread.sleep( waitTime );
+                } catch ( InterruptedException ie ) {
+                    Thread.currentThread().interrupt(); // Restore the interrupted status
+                }
+                waitTime *= 2; // Double the delay for the next retry
+            }
+        }
+        throw new Exception( "Exponential backoff failed after " + maxRetries + " attempts." );
+    }
+
+
+}
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index e2a878b809..5ee32f1d09 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -18,55 +18,11 @@
 
 import com.google.common.collect.ImmutableMap;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.math.BigInteger;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.stream.Collectors;
-import lombok.Getter;
-import lombok.extern.slf4j.Slf4j;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.pf4j.Extension;
 import org.pf4j.Plugin;
 import org.pf4j.PluginWrapper;
-import org.polypheny.db.adapter.Adapter.AdapterProperties;
-import org.polypheny.db.adapter.Adapter.AdapterSettingBoolean;
-import org.polypheny.db.adapter.Adapter.AdapterSettingInteger;
-import org.polypheny.db.adapter.Adapter.AdapterSettingString;
-import org.polypheny.db.adapter.DataSource;
-import org.polypheny.db.adapter.DeployMode;
 import org.polypheny.db.catalog.Adapter;
-import org.polypheny.db.catalog.Catalog;
-import org.polypheny.db.catalog.entity.CatalogAdapter;
-import org.polypheny.db.catalog.entity.CatalogColumnPlacement;
-import org.polypheny.db.catalog.entity.CatalogPartitionPlacement;
-import org.polypheny.db.catalog.entity.CatalogTable;
-import org.polypheny.db.catalog.exceptions.UnknownAdapterException;
-import org.polypheny.db.information.InformationGroup;
-import org.polypheny.db.information.InformationTable;
-import org.polypheny.db.prepare.Context;
 import org.polypheny.db.processing.TransactionExtension;
-import org.polypheny.db.schema.Schema;
-import org.polypheny.db.schema.SchemaPlus;
-import org.polypheny.db.schema.Table;
-import org.polypheny.db.transaction.PolyXid;
-import org.polypheny.db.type.PolyType;
-import org.web3j.abi.TypeReference;
-import org.web3j.abi.datatypes.Address;
-import org.web3j.abi.datatypes.Event;
-import org.web3j.abi.datatypes.Type;
-import org.web3j.abi.datatypes.generated.Uint256;
-import org.web3j.protocol.Web3j;
-import org.web3j.protocol.http.HttpService;
 
 
 public class EthereumPlugin extends Plugin {
@@ -106,476 +62,4 @@ public void stop() {
     }
 
 
-    @Slf4j
-    @Extension
-    @AdapterProperties(
-            name = "Ethereum",
-            description = "An adapter for querying the Ethereum blockchain. It uses the ethereum JSON-RPC API. Currently, this adapter only supports read operations.",
-            usedModes = DeployMode.REMOTE)
-    @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
-    @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
-    @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
-    @AdapterSettingString(name = "SmartContractAddress", description = "Address of the smart contract address", defaultValue = "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", position = 4, modifiable = true) // Event Data: Add annotation
-    @AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable = true) // Event Data: Add annotation
-    @AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true)
-    @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true)
-    @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true)
-    @AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 9, modifiable = true)
-    public static class EthereumDataSource extends DataSource {
-
-        public static final String SCHEMA_NAME = "public";
-        private String clientURL;
-        @Getter
-        private int blocks;
-        @Getter
-        private boolean experimentalFiltering;
-        private EthereumSchema currentSchema;
-        @Getter
-        private final String smartContractAddress;
-        @Getter
-        final List<String> smartContractAddresses;
-        private final String etherscanApiKey;
-        @Getter
-        private final BigInteger fromBlock;
-        @Getter
-        private final BigInteger toBlock;
-        private final Map<String, EventData> eventDataMap;
-        private Boolean caching;
-        private String cachingAdapterTargetName;
-
-        private Map<String, List<ExportedColumn>> map;
-
-
-        public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) {
-            super( storeId, uniqueName, settings, true );
-            setClientURL( settings.get( "ClientUrl" ) );
-            this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
-            this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
-            this.smartContractAddress = settings.get( "SmartContractAddress" ); // Event Data; Add smartContractAddress to EDataSource
-            this.smartContractAddresses = Arrays.asList( "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", "0x6b175474e89094c44da98b954eedeac495271d0f" ); // todo: get from adapter settings
-            this.etherscanApiKey = settings.get( "EtherscanApiKey" );
-            this.fromBlock = new BigInteger( settings.get( "fromBlock" ) );
-            this.toBlock = new BigInteger( settings.get( "toBlock" ) );
-            this.eventDataMap = new HashMap<>();
-            this.caching = Boolean.parseBoolean( settings.get( "Caching" ) );
-            this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" );
-            new Thread( () -> {
-                createInformationPage();
-                enableInformationPage();
-            } ).start();
-
-            //createInformationPage();
-            //enableInformationPage();
-        }
-
-
-        private void setClientURL( String clientURL ) {
-            Web3j web3j = Web3j.build( new HttpService( clientURL ) );
-            try {
-                BigInteger latest = web3j.ethBlockNumber().send().getBlockNumber();
-            } catch ( Exception e ) {
-                throw new RuntimeException( "Unable to connect the client URL '" + clientURL + "'" );
-            }
-            web3j.shutdown();
-            this.clientURL = clientURL;
-        }
-
-
-        @Override
-        public void createNewSchema( SchemaPlus rootSchema, String name ) {
-            currentSchema = new EthereumSchema( this.clientURL );
-        }
-
-
-        @Override
-        public Table createTableSchema( CatalogTable combinedTable, List<CatalogColumnPlacement> columnPlacementsOnStore, CatalogPartitionPlacement partitionPlacement ) {
-            return currentSchema.createBlockchainTable( combinedTable, columnPlacementsOnStore, this );
-        }
-
-
-        @Override
-        public Schema getCurrentSchema() {
-            return currentSchema;
-        }
-
-
-        @Override
-        public void truncate( Context context, CatalogTable table ) {
-            throw new RuntimeException( "Blockchain adapter does not support truncate" );
-        }
-
-
-        // Because the EthereumAdapter is a source, Pp will ask (call this method) always what the structure of this is adapter is.
-        @Override
-        public Map<String, List<ExportedColumn>> getExportedColumns() {
-            log.warn( "getExportedColumn" );
-            // Ensure that this block of code is called only once by checking if 'map' is null before proceeding
-            if ( map != null ) {
-                return map;
-            }
-
-            Map<String, List<ExportedColumn>> map = new HashMap<>();
-
-            String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" };
-            PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP };
-            createExportedColumns( "block", map, blockColumns, blockTypes );
-
-            String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
-            PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
-            createExportedColumns( "transaction", map, transactionColumns, transactionTypes );
-
-            String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" };
-            PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };
-            createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes );
-
-            if ( caching == Boolean.TRUE ) {
-                // Disable caching to prevent multiple unnecessary attempts to cache the same data.
-                caching = false;
-                this.map = map;
-                // todo: fix concurrency issues (dirty solution right now)
-                new Thread( () -> {
-                    try {
-                        Thread.sleep( 1200 );
-                    } catch ( InterruptedException e ) {
-                        throw new RuntimeException( e );
-                    }
-                    try {
-                        Map<String, List<EventData>> eventsPerContract = eventDataMap.values().stream()
-                                .collect(Collectors.groupingBy(
-                                        EventData::getSmartContractAddress,
-                                        Collectors.toList()
-                                ));
-                        CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName );
-                        EventCacheManager.getInstance()
-                                .register( getAdapterId(), cachingAdapter.id, clientURL, 50, fromBlock, toBlock, eventsPerContract, map )
-                                .initializeCaching();
-                    } catch ( UnknownAdapterException e ) {
-                        // If the specified adapter is not found, throw a RuntimeException
-                        throw new RuntimeException( e );
-                    }
-                } ).start();
-            }
-
-            return map;
-        }
-
-
-        @Override
-        public boolean prepare( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support prepare()." );
-            return true;
-        }
-
-
-        @Override
-        public void commit( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support commit()." );
-        }
-
-
-        @Override
-        public void rollback( PolyXid xid ) {
-            log.debug( "Blockchain Store does not support rollback()." );
-        }
-
-
-        @Override
-        public void shutdown() {
-            removeInformationPage();
-        }
-
-
-        @Override
-        protected void reloadSettings( List<String> updatedSettings ) {
-            if ( updatedSettings.contains( "ClientUrl" ) ) {
-                setClientURL( settings.get( "ClientUrl" ) );
-            }
-            if ( updatedSettings.contains( "Blocks" ) ) {
-                this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
-            }
-            if ( updatedSettings.contains( "ExperimentalFiltering" ) ) {
-                this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
-            }
-        }
-
-
-        protected void createInformationPage() {
-            for ( Map.Entry<String, List<ExportedColumn>> entry : getExportedColumns().entrySet() ) {
-                InformationGroup group = new InformationGroup(
-                        informationPage,
-                        entry.getValue().get( 0 ).physicalSchemaName + "." + entry.getValue().get( 0 ).physicalTableName );
-
-                InformationTable table = new InformationTable(
-                        group,
-                        Arrays.asList( "Position", "Column Name", "Type", "Primary" ) );
-                for ( ExportedColumn exportedColumn : entry.getValue() ) {
-                    table.addRow(
-                            exportedColumn.physicalPosition,
-                            exportedColumn.name,
-                            exportedColumn.getDisplayType(),
-                            exportedColumn.primary ? "✔" : ""
-                    );
-                }
-                informationElements.add( table );
-                informationGroups.add( group );
-            }
-        }
-
-
-        protected List<JSONObject> getEventsFromABI( String etherscanApiKey, String contractAddress ) {
-            List<JSONObject> events = new ArrayList<>();
-            try {
-                URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getabi&address=" + contractAddress + "&apikey=" + etherscanApiKey );
-                HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-                connection.setRequestMethod( "GET" );
-                int responseCode = connection.getResponseCode();
-                if ( responseCode == HttpURLConnection.HTTP_OK ) {
-                    BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
-                    String inputLine;
-                    StringBuilder response = new StringBuilder();
-
-                    while ( (inputLine = in.readLine()) != null ) {
-                        response.append( inputLine );
-                    }
-                    in.close();
-
-                    JSONObject jsonObject = new JSONObject( response.toString() );
-                    String abi = jsonObject.getString( "result" );
-                    JSONArray abiArray = new JSONArray( abi ); // Convert ABI string to JSON Array
-                    for ( int i = 0; i < abiArray.length(); i++ ) {
-                        JSONObject obj = abiArray.getJSONObject( i );
-                        // Check if the current object is an event
-                        if ( obj.getString( "type" ).equals( "event" ) ) {
-                            events.add( obj );
-                        }
-                    }
-                }
-
-            } catch ( IOException e ) {
-                // todo: handle errors; for example no abi or internet connection etc.
-                log.warn( "Failed to fetch events from the contract ABI" );
-                throw new RuntimeException( e );
-            }
-
-            return events;
-        }
-
-
-        protected Event getEventFromCatalogTable( String catalogTableName ) {
-            if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
-                return null;
-            }
-            return eventDataMap.get( catalogTableName ).getEvent();
-        }
-
-
-        protected String getSmartContractAddressFromCatalogTable( String catalogTableName ) {
-            if ( catalogTableName.equals( "block" ) || catalogTableName.equals( "transaction" ) ) {
-                return null;
-            }
-            return eventDataMap.get( catalogTableName ).getSmartContractAddress();
-
-        }
-
-
-        private void createExportedColumns( String physicalTableName, Map<String, List<ExportedColumn>> map, String[] columns, PolyType[] types ) {
-            PolyType collectionsType = null;
-            Integer length = 300;
-            Integer scale = null;
-            Integer dimension = null;
-            Integer cardinality = null;
-            int position = 0;
-            List<ExportedColumn> cols = new ArrayList<>();
-            for ( String col : columns ) {
-                cols.add( new ExportedColumn(
-                        col,
-                        types[position],
-                        collectionsType,
-                        length,
-                        scale,
-                        dimension,
-                        cardinality,
-                        false,
-                        SCHEMA_NAME,
-                        physicalTableName,
-                        col,
-                        position,
-                        position == 0 ) );
-                position++;
-
-            }
-            map.put( physicalTableName, cols );
-        }
-
-
-        private void createExportedColumnsForEvents( Map<String, List<ExportedColumn>> map, String[] commonEventColumns, PolyType[] commonEventTypes ) {
-            for ( String address : smartContractAddresses ) {
-                // todo: API Rate Limits Etherscan. If called inside for loop it can cause error
-                String contractName = null;
-                List<JSONObject> contractEvents = null;
-                try {
-                    contractName = callWithExponentialBackoff(() -> getContractName(address));
-                    contractEvents = callWithExponentialBackoff(() -> getEventsFromABI(etherscanApiKey, address));
-                } catch ( Exception e ) {
-                    throw new RuntimeException( e );
-                }
-
-                // String contractName = getContractName( address );
-                // List<JSONObject> contractEvents = getEventsFromABI( etherscanApiKey, address );
-
-                for ( JSONObject event : contractEvents ) {
-                    if ( event.getBoolean( "anonymous" ) ) {
-                        continue;
-                    }
-                    String eventName = event.getString( "name" ); // to match it later with catalogTable.name
-                    String compositeKey = contractName + "_" + eventName; // e.g. Uni_Transfer & Dai_Transfer
-                    JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data)
-                    eventDataMap.put( compositeKey.toLowerCase(), new EventData( eventName, contractName, address, abiInputs ) );
-                }
-            }
-
-            PolyType collectionsType = null;
-            Integer scale = null;
-            Integer dimension = null;
-            Integer cardinality = null;
-
-            // Event Data: Creating columns for each event for specified smart contract based on ABI
-            for ( Map.Entry<String, EventData> eventEntry : eventDataMap.entrySet() ) {
-                // String eventName = eventEntry.getValue().getOriginalKey(); // Get the original event name
-                String compositeEventName = eventEntry.getValue().getCompositeName();
-                JSONArray abiInputs = eventEntry.getValue().getAbiInputs(); // Get the data
-                List<ExportedColumn> eventDataCols = new ArrayList<>();
-                int inputPosition = 0;
-
-                for ( int i = 0; i < abiInputs.length(); i++ ) {
-                    JSONObject inputObject = abiInputs.getJSONObject( i );
-                    String col = inputObject.getString( "name" );
-                    PolyType type = convertToPolyType( inputObject.getString( "type" ) ); // convert event types to polytype
-                    eventDataCols.add( new ExportedColumn(
-                            col,
-                            type,
-                            collectionsType,
-                            getLengthForType( type ),
-                            scale,
-                            dimension,
-                            cardinality,
-                            false,
-                            SCHEMA_NAME,
-                            compositeEventName, // event name
-                            col,
-                            inputPosition,
-                            inputPosition == 0
-                    ) );
-                    inputPosition++;
-                }
-
-                // Adding common columns
-                for ( int i = 0; i < commonEventColumns.length; i++ ) {
-                    String columnName = commonEventColumns[i];
-                    PolyType columnType = commonEventTypes[i];
-                    eventDataCols.add( new ExportedColumn(
-                            columnName,
-                            columnType,
-                            collectionsType,
-                            getLengthForType( columnType ),
-                            scale,
-                            dimension,
-                            cardinality,
-                            false,
-                            SCHEMA_NAME,
-                            compositeEventName, // event name
-                            columnName,
-                            inputPosition,
-                            inputPosition == 0
-                    ) );
-                    inputPosition++;
-                }
-
-                map.put( compositeEventName, eventDataCols );
-            }
-
-        }
-
-
-        private String getContractName( String contractAddress ) {
-            try {
-                URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getsourcecode&address=" + contractAddress + "&apikey=" + etherscanApiKey );
-                HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-                connection.setRequestMethod( "GET" );
-                int responseCode = connection.getResponseCode();
-                if ( responseCode == HttpURLConnection.HTTP_OK ) {
-                    BufferedReader in = new BufferedReader( new InputStreamReader( connection.getInputStream() ) );
-                    String inputLine;
-                    StringBuilder response = new StringBuilder();
-
-                    while ( (inputLine = in.readLine()) != null ) {
-                        response.append( inputLine );
-                    }
-                    in.close();
-
-                    JSONObject jsonObject = new JSONObject( response.toString() );
-                    JSONArray resultArray = jsonObject.getJSONArray( "result" ); // Get result array
-                    if ( resultArray.length() > 0 ) {
-                        JSONObject contractObject = resultArray.getJSONObject( 0 ); // Get the first object in result array
-                        return contractObject.getString( "ContractName" ); // Return ContractName field
-                    }
-
-                }
-
-            } catch ( IOException e ) {
-                // todo: handle errors; for example no abi or internet connection etc.
-                throw new RuntimeException( e );
-            }
-            return null;
-        }
-
-
-        private Integer getLengthForType( PolyType type ) {
-            switch ( type ) {
-                case VARCHAR:
-                    return 300;
-                default:
-                    return null;
-            }
-        }
-
-
-        static PolyType convertToPolyType( String type ) {
-            // todo: convert all types in evm to polytype
-            switch ( type ) {
-                case "address":
-                    return PolyType.VARCHAR;
-                case "uint256":
-                    return PolyType.BIGINT;
-                default:
-                    return null;
-            }
-        }
-
-        public <T> T callWithExponentialBackoff( Callable<T> callable) throws Exception {
-            int maxRetries = 5;
-            long waitTime = 1000; // 1 second
-
-            for (int retry = 0; retry < maxRetries; retry++) {
-                try {
-                    return callable.call();
-                } catch (Exception e) {
-                    if (retry == maxRetries - 1) {
-                        throw e; // If this was our last retry, rethrow the exception
-                    }
-                    try {
-                        Thread.sleep(waitTime);
-                    } catch (InterruptedException ie) {
-                        Thread.currentThread().interrupt(); // Restore the interrupted status
-                    }
-                    waitTime *= 2; // Double the delay for the next retry
-                }
-            }
-            throw new Exception("Exponential backoff failed after " + maxRetries + " attempts.");
-        }
-
-
-
-    }
 }
\ No newline at end of file
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
index ebc6136d6f..1c0852cded 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
@@ -21,7 +21,6 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
index 12962be3fd..7b3ffed976 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
@@ -25,7 +25,6 @@
 import org.apache.calcite.linq4j.Enumerable;
 import org.apache.calcite.linq4j.Enumerator;
 import org.polypheny.db.adapter.DataContext;
-import org.polypheny.db.adapter.ethereum.EthereumPlugin.EthereumDataSource;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeField;
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
index 3e6b2c2e8e..4267ae6a92 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
@@ -42,8 +42,6 @@
 import org.web3j.protocol.core.methods.response.Log;
 import org.web3j.protocol.http.HttpService;
 
-// TODO evtl.: Man könnte es noch weiter abtrennen. Jedes Event hat einen Cache. Bzw. jedes Event macht sein eigenes caching (hat seine eigen URL)
-// könnte evtl. logistisch für Java einfacher sein.
 @Slf4j // library to use logging annotations
 public class EventCache {
 
@@ -83,59 +81,8 @@ private void addLogsToCache( String address, EventData eventData, BigInteger sta
 
         try {
             List<EthLog.LogResult> rawLogs = web3j.ethGetLogs( filter ).send().getLogs();
-
-            List<List<Object>> structuredLogs = new ArrayList<>();
-
-            for ( EthLog.LogResult rawLogResult : rawLogs ) {
-                Log rawLog = (Log) rawLogResult.get();
-                List<Object> structuredLog = new ArrayList<>();
-
-                // Add all indexed values first (topics)
-                for ( int i = 0; i < event.getParameters().size(); i++ ) {
-                    TypeReference<?> paramType = event.getParameters().get( i );
-                    if ( paramType.isIndexed() ) {
-                        structuredLog.add( extractIndexedValue( rawLog, paramType, i ) );
-                    }
-                }
-
-                // Then add all non-indexed values (data)
-                int nonIndexedPosition = 0; // Separate index for non-indexed parameters
-                for ( int i = 0; i < event.getParameters().size(); i++ ) {
-                    TypeReference<?> paramType = event.getParameters().get( i );
-                    if ( !paramType.isIndexed() ) {
-                        structuredLog.add( extractNonIndexedValue( rawLog, paramType, nonIndexedPosition, event ) );
-                        nonIndexedPosition++;
-                    }
-                }
-
-                // Add other log information as needed
-                structuredLog.add( rawLog.isRemoved() );
-                structuredLog.add( rawLog.getLogIndex() );
-                structuredLog.add( rawLog.getTransactionIndex() );
-                structuredLog.add( rawLog.getTransactionHash() );
-                structuredLog.add( rawLog.getBlockHash() );
-                structuredLog.add( rawLog.getBlockNumber() );
-                structuredLog.add( rawLog.getAddress() );
-
-                structuredLogs.add( structuredLog );
-            }
-
-            // If cache is a Map<Event, List<List<Object>>>, you can store structuredLogs as follows
+            List<List<Object>> structuredLogs = normalizeLogs( event, rawLogs );
             cache.put( eventData, structuredLogs );
-
-            // We are still writing to memory with logs & .addAll. Right now we will use the memory space.
-            //cache.get( event ).addAll( rawLogs );
-
-            // Without using the memory:
-            // Directly write to store. How?
-            // 1. call getLogs method which returns logs
-            // 2. write it directly to the store: writeToStore( getLogs() )
-            // This can be done synchronously. David thinks this method is good for my project. This means we don't need the cache Hashmap anymore.
-
-            // or (again using a little bit of memory)
-            // use also a hashmap like above, write them into the map (like right now) but this time use multithreading
-            // so when one value is put into the map another is written to the store asynchronously
-
         } catch ( IOException e ) {
             // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list.
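// A hedged sketch of the handling this comment asks for (an assumption, not applied
// by the patch): log the failed range and fall back to an empty batch, so one bad
// RPC call does not abort the whole caching run.
//
//     } catch ( IOException e ) {
//         log.warn( "eth_getLogs failed for " + address + " [" + startBlock + ", " + endBlock + "]", e );
//         cache.put( eventData, new ArrayList<>() );
//     }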
         }
 
@@ -155,5 +102,52 @@ private Object extractNonIndexedValue( Log rawLog, TypeReference<?> paramType, i
     }
 
 
+    private List<List<Object>> normalizeLogs( Event event, List<EthLog.LogResult> rawLogs ) {
+        List<List<Object>> structuredLogs = new ArrayList<>();
+        for ( EthLog.LogResult rawLogResult : rawLogs ) {
+            Log rawLog = (Log) rawLogResult.get();
+
+            if ( rawLog.getLogIndex() == null ||
+                    rawLog.getTransactionIndex() == null ||
+                    rawLog.getBlockNumber() == null ) {
+                continue; // don't add pending logs because of primary key
+            }
+
+            List<Object> structuredLog = new ArrayList<>();
+
+            // Add all indexed values first (topics)
+            for ( int i = 0; i < event.getParameters().size(); i++ ) {
+                TypeReference<?> paramType = event.getParameters().get( i );
+                if ( paramType.isIndexed() ) {
+                    structuredLog.add( extractIndexedValue( rawLog, paramType, i ) );
+                }
+            }
+
+            // Then add all non-indexed values (data)
+            int nonIndexedPosition = 0; // Separate index for non-indexed parameters
+            for ( int i = 0; i < event.getParameters().size(); i++ ) {
+                TypeReference<?> paramType = event.getParameters().get( i );
+                if ( !paramType.isIndexed() ) {
+                    structuredLog.add( extractNonIndexedValue( rawLog, paramType, nonIndexedPosition, event ) );
+                    nonIndexedPosition++;
+                }
+            }
+
+            // Add other log information as needed
+            structuredLog.add( rawLog.isRemoved() );
+            structuredLog.add( rawLog.getLogIndex() );
+            structuredLog.add( rawLog.getTransactionIndex() );
+            structuredLog.add( rawLog.getTransactionHash() );
+            structuredLog.add( rawLog.getBlockHash() );
+            structuredLog.add( rawLog.getBlockNumber() );
+            structuredLog.add( rawLog.getAddress() );
+
+            structuredLogs.add( structuredLog );
+        }
+
+        return structuredLogs;
+    }
+
+
 }
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 9a46c861fc..65ad9fea28 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -105,8 +105,8 @@ private EventCacheManager( TransactionManager transactionManager ) {
     }
 
 
-    public ContractCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> map ) {
-        ContractCache cache = new ContractCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, map );
+    public ContractCache register( int sourceAdapterId, int targetAdapterId, String clientUrl, int batchSizeInBlocks, BigInteger fromBlock, BigInteger toBlock, Map<String, List<EventData>> eventsPerContract, Map<String, List<ExportedColumn>> columns ) {
+        ContractCache cache = new ContractCache( sourceAdapterId, targetAdapterId, clientUrl, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, columns );
         this.caches.put( sourceAdapterId, cache );
         return cache;
     }
@@ -127,7 +127,7 @@ void createTables( int sourceAdapterId, Map<String, List<FieldInformation>> tabl
 
         // For each table, a new table is created with their constraint (e.g., a primary key).
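        // Context for the constraint chosen below: block_number plus log_index uniquely
        // identifies an emitted log on chain, and transaction_index additionally ties it to
        // its transaction, so the triple works as a composite primary key. Pending logs,
        // whose three fields are still null, are filtered out in normalizeLogs above, so
        // the key columns are always populated.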
for ( Entry> table : tableInformations.entrySet() ) {
- ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( table.getValue().get( 0 ).name ) ); // todo atm first column is primary, we should adjust that
+ ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( "log_index", "transaction_index", "block_number" ) );
 DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, transaction.createStatement() );
 }
@@ -169,7 +169,7 @@ void writeToStore( String tableName, List> logResults ) {
 builder.push( LogicalValues.createOneRow( builder.getCluster() ) );
 builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ), rowType.getFieldNames() );
 builder.insert( (AlgOptTable) table );
- // TODO: we should re-use this for all batches (ignore right now); David will do this
+ // todo: we should re-use this for all batches (ignore right now); David will do this
 AlgNode node = builder.build(); // Construct the algebraic node
 AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // Wrap the node into an AlgRoot as required by Polypheny
@@ -185,7 +185,7 @@ void writeToStore( String tableName, List> logResults ) {
 for ( List logResult : logResults ) {
 Object value = logResult.get( i );
 // todo: converting to long (-2^63-1 till 2^63-1) from uint256 (2^256-1) if data is greater than 2^63-1
- // how to convert it to bigint? Is there a Poltype that can handle unit256? Double?
+ // how to convert it to bigint? Is there a PolyType that can handle uint256? Double? Possibly Decimal?
 // is Bigint 64-bit signed integer?
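To the question in the comment above: BIGINT is a signed 64-bit integer, so it cannot represent a full uint256, and patch 14 further down switches these columns to DECIMAL backed by BigDecimal. A sketch of the widening rule (class and helper names are illustrative):

import java.math.BigDecimal;
import java.math.BigInteger;

final class Uint256Widening {

    // uint256 ranges up to 2^256 - 1, while long tops out at 2^63 - 1.
    static Object toStoreValue( BigInteger uint256 ) {
        if ( uint256 == null ) {
            return null;
        }
        if ( uint256.bitLength() <= 63 ) {
            return uint256.longValueExact(); // fits a signed 64-bit BIGINT
        }
        return new BigDecimal( uint256 ); // keep full precision as DECIMAL
    }

}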
if ( value instanceof Address ) {
 value = value.toString();
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
index 1295cf5230..aa9f79932e 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
@@ -73,7 +73,7 @@ public String[] readNext() throws IOException {
 this.blockReads--; // Decrement blockReads when all logs for the current block have been processed
 }

- // Decode the data field of the log
+ // Decode the data field of the log (non-indexed parameters)
 String data = log.getData();
 List decodedData = FunctionReturnDecoder.decode( data, event.getNonIndexedParameters() );

From 2d4ce6288acafccc97b335f9ca7c2c73bb82eac9 Mon Sep 17 00:00:00 2001
From: Tunc Polat
Date: Fri, 18 Aug 2023 19:34:08 +0200
Subject: [PATCH 13/22] Add caching route

---
 plugins/ethereum-adapter/build.gradle | 5 +-
 .../adapter/ethereum/EthereumDataSource.java | 57 ++++++++++++-------
 .../db/adapter/ethereum/EthereumPlugin.java | 7 +--
 .../db/adapter/ethereum/EthereumStarter.java | 5 ++
 .../db/adapter/ethereum/EventCache.java | 44 +++++++++-----
 .../adapter/ethereum/EventCacheManager.java | 3 +-
 .../db/adapter/ethereum/EventDataReader.java | 16 +++++-
 7 files changed, 93 insertions(+), 44 deletions(-)

diff --git a/plugins/ethereum-adapter/build.gradle b/plugins/ethereum-adapter/build.gradle
index 37c67c847f..ee0abd44ce 100644
--- a/plugins/ethereum-adapter/build.gradle
+++ b/plugins/ethereum-adapter/build.gradle
@@ -5,7 +5,9 @@ dependencies {
 implementation project(path: ':core')
 compileOnly project(":core")

- // Apache 2.0
+ implementation project(':webui')
+
+ // Apache 2.0
 implementation(group: "org.web3j", name: "core", version: web3j_version) { exclude(group: "org.slf4j") } // Apache 2.0

@@ -17,6 +19,7 @@ dependencies {
 testImplementation project(path: ":core", configuration: "tests")
 // BSD 3-clause

+
 }

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index 6dbea8b501..41c49194bf 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -67,15 +67,18 @@
 @AdapterSettingString(name = "ClientUrl", description = "The URL of the ethereum JSON RPC client", defaultValue = "https://mainnet.infura.io/v3/4d06589e97064040b5da99cf4051ef04", position = 1)
 @AdapterSettingInteger(name = "Blocks", description = "The number of Blocks to fetch when processing a query", defaultValue = 10, position = 2, modifiable = true)
 @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
-@AdapterSettingString(name = "SmartContractAddresses", description = "Comma sepretaed addresses of the smart contracts", defaultValue = "0x6b175474e89094c44da98b954eedeac495271d0f, 0x6b175474e89094c44da98b954eedeac495271d0f", position = 4, modifiable = true) // Event Data: Add annotation
-@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 5, modifiable
= true) // Event Data: Add annotation
-@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 6, modifiable = true)
-@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 7, modifiable = true)
-@AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 8, modifiable = true)
-@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 9, modifiable = true) // todo DL: list
+@AdapterSettingBoolean(name = "EventDataRetrieval", description = "Enables or disables the retrieval of event data. When set to true, all subsequent adapter settings will be taken into account.", defaultValue = true, position = 4, modifiable = true)
+@AdapterSettingString(name = "SmartContractAddresses", description = "Comma separated addresses of the smart contracts", defaultValue = "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984, 0x6b175474e89094c44da98b954eedeac495271d0f", position = 5, modifiable = true) // Event Data: Add annotation
+@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 6, modifiable = true) // Event Data: Add annotation
+@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 7, modifiable = true)
+@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 8, modifiable = true)
+@AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true)
+@AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true)
+@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true) // todo DL: list
 public class EthereumDataSource extends DataSource {

 public static final String SCHEMA_NAME = "public";
+ private final boolean eventDataRetrieval;
 private String clientURL;
 @Getter
 private int blocks;
@@ -89,6 +92,8 @@ public class EthereumDataSource extends DataSource {
 private final BigInteger fromBlock;
 @Getter
 private final BigInteger toBlock;
+ private final int batchSizeInBlocks;
+
 private final Map eventDataMap;
 private Boolean caching;
 private String cachingAdapterTargetName;
@@ -96,24 +101,25 @@ public class EthereumDataSource extends DataSource {
 private Map> map;

- // todo: take it out
 public EthereumDataSource( final int storeId, final String uniqueName, final Map settings ) {
 super( storeId, uniqueName, settings, true );
 setClientURL( settings.get( "ClientUrl" ) );
 this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
 this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
+ this.eventDataRetrieval = Boolean.parseBoolean( settings.get( "EventDataRetrieval" ) );
 String smartContractAddressesStr = settings.get( "SmartContractAddresses" );
 List smartContractAddresses = Arrays.stream( smartContractAddressesStr.split( "," ) )
 .map( String::trim )
 .collect( Collectors.toList() );
- this.smartContractAddresses = smartContractAddresses; // Event Data; Add smartContractAddress to EDataSource
- // this.smartContractAddresses =
Arrays.asList( "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984", "0x6b175474e89094c44da98b954eedeac495271d0f" ); // todo: get from adapter settings + this.smartContractAddresses = smartContractAddresses; this.etherscanApiKey = settings.get( "EtherscanApiKey" ); this.fromBlock = new BigInteger( settings.get( "fromBlock" ) ); this.toBlock = new BigInteger( settings.get( "toBlock" ) ); + this.batchSizeInBlocks = Integer.parseInt( settings.get( "batchSizeInBlocks" ) ); this.eventDataMap = new HashMap<>(); this.caching = Boolean.parseBoolean( settings.get( "Caching" ) ); this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" ); + // todo DL new Thread( () -> { createInformationPage(); enableInformationPage(); @@ -178,6 +184,11 @@ public Map> getExportedColumns() { PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR }; createExportedColumns( "transaction", map, transactionColumns, transactionTypes ); + if ( eventDataRetrieval == false ) { + this.map = map; + return map; + } + String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" }; PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR }; createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes ); @@ -205,7 +216,7 @@ public Map> getExportedColumns() { ) ); CatalogAdapter cachingAdapter = Catalog.getInstance().getAdapter( cachingAdapterTargetName ); EventCacheManager.getInstance() - .register( getAdapterId(), cachingAdapter.id, clientURL, 50, fromBlock, toBlock, eventsPerContract, columns ) + .register( getAdapterId(), cachingAdapter.id, clientURL, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, columns ) .initializeCaching(); } catch ( UnknownAdapterException e ) { // If the specified adapter is not found, throw a RuntimeException @@ -312,7 +323,6 @@ private void createExportedColumns( String physicalTableName, Map> map, String[] commonEventColumns, PolyType[] commonEventTypes ) { for ( String address : smartContractAddresses ) { - // todo: API Rate Limits Etherscan. 
If called inside for loop it can cause error String contractName = null; List contractEvents = null; try { @@ -322,9 +332,6 @@ private void createExportedColumnsForEvents( Map> m throw new RuntimeException( e ); } - // String contractName = getContractName( address ); - // List contractEvents = getEventsFromABI( etherscanApiKey, address ); - for ( JSONObject event : contractEvents ) { if ( event.getBoolean( "anonymous" ) ) { continue; @@ -417,6 +424,13 @@ protected List getEventsFromABI( String etherscanApiKey, String cont in.close(); JSONObject jsonObject = new JSONObject( response.toString() ); + String apiStatus = jsonObject.getString( "status" ); + + if ( "0".equals( apiStatus ) ) { + String errorMessage = jsonObject.getString( "message" ); + throw new RuntimeException( "Etherscan API error getting abi from contract: " + errorMessage ); + } + String abi = jsonObject.getString( "result" ); JSONArray abiArray = new JSONArray( abi ); // Convert ABI string to JSON Array for ( int i = 0; i < abiArray.length(); i++ ) { @@ -429,9 +443,7 @@ protected List getEventsFromABI( String etherscanApiKey, String cont } } catch ( IOException e ) { - // todo: handle errors; for example no abi or internet connection etc. - log.warn( "GET EVENTS ERROR" ); - throw new RuntimeException( e ); + throw new RuntimeException( "Network or IO error occurred", e ); } return events; @@ -455,17 +467,22 @@ private String getContractName( String contractAddress ) { in.close(); JSONObject jsonObject = new JSONObject( response.toString() ); + String apiStatus = jsonObject.getString( "status" ); + + if ( "0".equals( apiStatus ) ) { + String errorMessage = jsonObject.getString( "message" ); + throw new RuntimeException( "Etherscan API error getting contract name: " + errorMessage ); + } + JSONArray resultArray = jsonObject.getJSONArray( "result" ); // Get result array if ( resultArray.length() > 0 ) { JSONObject contractObject = resultArray.getJSONObject( 0 ); // Get the first object in result array return contractObject.getString( "ContractName" ); // Return ContractName field } - } } catch ( IOException e ) { - // todo: handle errors; for example no abi or internet connection etc. 
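Both Etherscan lookups (contract name and ABI) run once per configured address, and Etherscan's free tier rate-limits bursts of requests, which is why calling them inside the loop was flagged as risky. The series routes these calls through a callWithExponentialBackoff helper whose definition does not appear in these hunks; a minimal version consistent with the call sites might look like this (retry budget and base delay are assumptions):

import java.util.concurrent.Callable;

final class Backoff {

    static <T> T callWithExponentialBackoff( Callable<T> call ) throws Exception {
        long delayMs = 500;        // assumed base delay
        final int maxAttempts = 5; // assumed retry budget
        for ( int attempt = 1; ; attempt++ ) {
            try {
                return call.call();
            } catch ( Exception e ) {
                if ( attempt == maxAttempts ) {
                    throw e;
                }
                Thread.sleep( delayMs );
                delayMs *= 2; // double the wait after each failed attempt
            }
        }
    }

}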
- throw new RuntimeException( e ); + throw new RuntimeException( "Network or IO error occurred", e ); } return null; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 5ee32f1d09..4efcee9f53 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableMap; +import org.polypheny.db.webui.HttpServer; import java.util.Map; import org.pf4j.Plugin; import org.pf4j.PluginWrapper; @@ -53,13 +54,11 @@ public void start() { TransactionExtension.REGISTER.add( new EthereumStarter() ); // add extension to transaction manager Adapter.addAdapter( EthereumDataSource.class, ADAPTER_NAME, settings ); + } - @Override - public void stop() { - Adapter.removeAdapter( EthereumDataSource.class, ADAPTER_NAME ); - } + } \ No newline at end of file diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java index 9f11e7fd0e..1a53b46b88 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java @@ -20,6 +20,8 @@ import org.polypheny.db.iface.Authenticator; import org.polypheny.db.processing.TransactionExtension; import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.webui.HttpServer; +import org.polypheny.db.webui.HttpServer.HandlerType; // helper method, because Polypheny will create the TransactionManager (TM) relatively late // Polypheny will startup and then get all the plugins @@ -31,6 +33,9 @@ public class EthereumStarter implements TransactionExtension { @Override public void initExtension( TransactionManager manager, Authenticator authenticator ) { EventCacheManager.getAndSet( manager ); + EventCacheManager eventCacheManager = EventCacheManager.getInstance(); + HttpServer server = HttpServer.getInstance(); + server.addRoute( "getEventCacheStatus", ( request, crud ) -> eventCacheManager.getAllStreamStatus(), Void.class, HandlerType.GET ); } } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index 4267ae6a92..1410032c7e 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -36,6 +36,7 @@ import org.web3j.abi.datatypes.Type; import org.web3j.protocol.Web3j; import org.web3j.protocol.core.DefaultBlockParameter; +import org.web3j.protocol.core.Response; import org.web3j.protocol.core.methods.request.EthFilter; import org.web3j.protocol.core.methods.response.EthLog; import org.web3j.protocol.core.methods.response.EthLog.LogResult; @@ -80,28 +81,29 @@ private void addLogsToCache( String address, EventData eventData, BigInteger sta filter.addSingleTopic( EventEncoder.encode( event ) ); try { - List rawLogs = web3j.ethGetLogs( filter ).send().getLogs(); + EthLog ethLog = web3j.ethGetLogs( filter ).send(); // Get the EthLog response + + // todo: show on screen and 
update + /**if ( startBlock.equals( BigInteger.valueOf( 17669096 ) ) ) { + throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock ); // just start new caching from startBlock + } + **/ + + if ( ethLog.hasError() ) { + Response.Error error = ethLog.getError(); + log.error( "Error fetching logs: " + error.getMessage() ); + throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock + ". Message: " + error.getMessage() ); // just start new caching from startBlock + } + List rawLogs = ethLog.getLogs(); List> structuredLogs = normalizeLogs( event, rawLogs ); cache.put( eventData, structuredLogs ); + } catch ( IOException e ) { - // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. + throw new RuntimeException( "IO Error fetching logs", e ); } } - private Object extractIndexedValue( Log rawLog, TypeReference paramType, int position ) { - // Get the indexed parameter from the log based on its position - String topics = rawLog.getTopics().get( position + 1 ); // The first topic is usually the event signature - return FunctionReturnDecoder.decodeIndexedValue( topics, paramType ); - } - - - private Object extractNonIndexedValue( Log rawLog, TypeReference paramType, int position, Event event ) { - List decodedValue = FunctionReturnDecoder.decode( rawLog.getData(), event.getNonIndexedParameters() ); - return decodedValue.get( position ); - } - - private List> normalizeLogs( Event event, List rawLogs ) { List> structuredLogs = new ArrayList<>(); for ( EthLog.LogResult rawLogResult : rawLogs ) { @@ -149,5 +151,17 @@ private List> normalizeLogs( Event event, List ra } + private Object extractIndexedValue( Log rawLog, TypeReference paramType, int position ) { + // Get the indexed parameter from the log based on its position + String topics = rawLog.getTopics().get( position + 1 ); // The first topic is usually the event signature + return FunctionReturnDecoder.decodeIndexedValue( topics, paramType ); + } + + + private Object extractNonIndexedValue( Log rawLog, TypeReference paramType, int position, Event event ) { + List decodedValue = FunctionReturnDecoder.decode( rawLog.getData(), event.getNonIndexedParameters() ); + return decodedValue.get( position ); + } + } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 65ad9fea28..e5c66d2592 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -60,7 +60,6 @@ import org.polypheny.db.transaction.TransactionException; import org.polypheny.db.transaction.TransactionManager; import org.web3j.abi.datatypes.Address; -import org.web3j.abi.datatypes.Event; import org.web3j.abi.datatypes.generated.Uint256; @@ -212,7 +211,7 @@ void writeToStore( String tableName, List> logResults ) { } - private Map getAllStreamStatus() { + protected Map getAllStreamStatus() { // return status of process return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, ContractCache::getStatus ) ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java index aa9f79932e..3988b0ca39 
100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java @@ -22,8 +22,10 @@ import java.util.List; import java.util.Arrays; import java.util.function.Predicate; +import lombok.extern.slf4j.Slf4j; import org.web3j.abi.FunctionReturnDecoder; import org.web3j.abi.datatypes.Type; +import org.web3j.protocol.core.Response; import org.web3j.protocol.core.methods.response.EthLog; import org.web3j.protocol.core.methods.response.Log; import org.web3j.protocol.core.methods.request.EthFilter; @@ -32,6 +34,7 @@ import org.web3j.abi.TypeReference; import org.web3j.abi.EventEncoder; +@Slf4j public class EventDataReader extends BlockReader { private List logs; @@ -52,9 +55,18 @@ public class EventDataReader extends BlockReader { filter.addSingleTopic( EventEncoder.encode( event ) ); try { - logs = web3j.ethGetLogs( filter ).send().getLogs(); // get logs + EthLog ethLog = web3j.ethGetLogs( filter ).send(); // Get the EthLog response + + if ( ethLog.hasError() ) { + Response.Error error = ethLog.getError(); + log.error( "Error fetching logs: " + error.getMessage() ); + throw new RuntimeException( "Error fetching logs: " + error.getMessage() ); + } + + logs = ethLog.getLogs(); + } catch ( IOException e ) { - // Handle exception here. Maybe log an error and re-throw, or set `logs` to an empty list. + throw new RuntimeException( "IO Error fetching logs", e ); } } From 85d8527b223123442ffbf9ce6d266944eb984943 Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Mon, 28 Aug 2023 15:12:25 +0200 Subject: [PATCH 14/22] Update caching logic and fix store value conversions --- .../main/java/org/polypheny/db/util/BsonUtil.java | 3 +++ .../java/org/polypheny/db/ddl/DdlManagerImpl.java | 3 ++- .../adapter/cottontail/util/CottontailTypeUtil.java | 4 ++++ .../polypheny/db/adapter/ethereum/CachingStatus.java | 7 +++++++ .../polypheny/db/adapter/ethereum/ContractCache.java | 12 +++++++++--- .../db/adapter/ethereum/EthereumDataSource.java | 2 +- .../db/adapter/ethereum/EthereumFieldType.java | 5 ++++- .../polypheny/db/adapter/ethereum/EventCache.java | 11 +++++------ .../db/adapter/ethereum/EventCacheManager.java | 9 +++++---- 9 files changed, 40 insertions(+), 16 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/util/BsonUtil.java b/core/src/main/java/org/polypheny/db/util/BsonUtil.java index f4aabc18d7..c26cc9964b 100644 --- a/core/src/main/java/org/polypheny/db/util/BsonUtil.java +++ b/core/src/main/java/org/polypheny/db/util/BsonUtil.java @@ -20,6 +20,7 @@ import java.io.InputStream; import java.io.PushbackInputStream; import java.math.BigDecimal; +import java.math.BigInteger; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; @@ -354,6 +355,8 @@ private static BsonValue handleBigInt( Object obj ) { if ( obj instanceof Long ) { return new BsonInt64( (Long) obj ); + } else if ( obj instanceof BigInteger ) { + return new BsonInt64(( (BigInteger) obj ).longValue()); } else { return new BsonInt64( (Integer) obj ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 338d484a69..6905246eb8 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -239,7 +239,8 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt exportedColumns = ((DataSource) 
adapter).getExportedColumns(); } catch ( Exception e ) { AdapterManager.getInstance().removeAdapter( adapter.getAdapterId() ); - throw new RuntimeException( "Could not deploy adapter", e ); + // throw new RuntimeException( "Could not deploy adapter", e ); + throw new RuntimeException( "Could not deploy adapter: " + e.getMessage(), e ); } // Create table, columns etc. for ( Map.Entry> entry : exportedColumns.entrySet() ) { diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java index 7bb90f93b2..b7498a4051 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/util/CottontailTypeUtil.java @@ -19,6 +19,7 @@ import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.math.BigDecimal; +import java.math.BigInteger; import java.util.ArrayList; import java.util.Calendar; import java.util.GregorianCalendar; @@ -312,6 +313,9 @@ public static CottontailGrpc.Literal toData( Object value, PolyType actualType, if ( value instanceof Long ) { return builder.setLongData( ((Long) value) ).build(); } + if ( value instanceof BigInteger ) { + return builder.setLongData(( (BigInteger) value ).longValue()).build(); + } break; } case INTEGER: diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java index 3c6d2d11e6..63a3bab219 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java @@ -16,11 +16,18 @@ package org.polypheny.db.adapter.ethereum; +import java.math.BigInteger; + public class CachingStatus { public float percent; public ProcessingState state; + public BigInteger fromBlock; + public BigInteger toBlock; + public BigInteger currentBlock; + public BigInteger currentEndBlock; + public enum ProcessingState { diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java index 2747a52282..cff8721e7a 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java @@ -82,7 +82,7 @@ private void createSchema() { .stream() .collect( Collectors.toMap( - table -> EthereumPlugin.HIDDEN_PREFIX + table.getKey(), // we prepend this to hide the table to the user + table -> EthereumPlugin.HIDDEN_PREFIX + "__" + targetAdapterId + "__" + table.getKey(), // we prepend this to hide the table to the user table -> table.getValue() .stream() .map( ExportedColumn::toFieldInformation ) @@ -107,7 +107,7 @@ public void startCaching() { for ( Map.Entry entry : cache.entrySet() ) { String address = entry.getKey(); EventCache eventCache = entry.getValue(); - eventCache.addToCache( address, currentBlock, endBlock ); + eventCache.addToCache( address, currentBlock, endBlock, targetAdapterId ); } currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers @@ -118,13 +118,19 @@ public void startCaching() { public CachingStatus 
getStatus() { CachingStatus status = new CachingStatus(); BigInteger totalBlocks = toBlock.subtract( fromBlock ).add( BigInteger.ONE ); + status.fromBlock = fromBlock; + status.toBlock = toBlock; + status.currentBlock = currentBlock; + status.currentEndBlock = currentBlock.add(BigInteger.valueOf(batchSizeInBlocks)); if ( currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ).compareTo( toBlock ) > 0 ) { status.percent = 100; status.state = CachingStatus.ProcessingState.DONE; + status.currentBlock = null; + status.currentEndBlock = null; } else { BigInteger processedBlocks = currentBlock.subtract( fromBlock ); - status.percent = processedBlocks.floatValue() / totalBlocks.floatValue() * 100; + status.percent = Math.round((processedBlocks.floatValue() / totalBlocks.floatValue() * 100) * 100) / 100f; if ( status.percent == 0 ) { status.state = CachingStatus.ProcessingState.INITIALIZED; diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java index 41c49194bf..0a267afd0a 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java @@ -524,7 +524,7 @@ static PolyType convertToPolyType( String type ) { return PolyType.VARCHAR; case "int": // 8 to 256... case "uint256": - return PolyType.BIGINT; + return PolyType.DECIMAL; // todo default: return null; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java index 7ec2acad0f..eacfdf1eef 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumFieldType.java @@ -40,7 +40,8 @@ enum EthereumFieldType { DOUBLE( Primitive.DOUBLE ), DATE( java.sql.Date.class, "date" ), TIME( java.sql.Time.class, "time" ), - TIMESTAMP( java.sql.Timestamp.class, "timestamp" ); + TIMESTAMP( java.sql.Timestamp.class, "timestamp" ), + DECIMAL( java.math.BigDecimal.class, "decimal" ); private static final Map MAP = new HashMap<>(); @@ -89,6 +90,8 @@ public static EthereumFieldType getBlockchainFieldType( PolyType type ) { return EthereumFieldType.TIME; case TIMESTAMP: return EthereumFieldType.TIMESTAMP; + case DECIMAL: + return EthereumFieldType.DECIMAL; default: throw new RuntimeException( "Unsupported datatype: " + type.name() ); } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index 1410032c7e..b26839a408 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -58,13 +58,13 @@ public EventCache( List events, Web3j web3j ) { } - public void addToCache( String address, BigInteger startBlock, BigInteger endBlock ) { + public void addToCache( String address, BigInteger startBlock, BigInteger endBlock, int targetAdapterId ) { for ( EventData event : events ) { addLogsToCache( address, event, startBlock, endBlock ); if ( cache.get( event ).size() == 0 ) { continue; } - 
EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ) ); // write the event into the store + EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ), targetAdapterId ); // write the event into the store cache.get( event ).clear(); // clear cache batch } } @@ -84,10 +84,9 @@ private void addLogsToCache( String address, EventData eventData, BigInteger sta EthLog ethLog = web3j.ethGetLogs( filter ).send(); // Get the EthLog response // todo: show on screen and update - /**if ( startBlock.equals( BigInteger.valueOf( 17669096 ) ) ) { - throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock ); // just start new caching from startBlock - } - **/ + /*if ( startBlock.equals( BigInteger.valueOf( 17669096 ) ) ) { + throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock ); // just start new caching from startBlock + }*/ if ( ethLog.hasError() ) { Response.Error error = ethLog.getError(); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index e5c66d2592..6ca54efd36 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -16,6 +16,7 @@ package org.polypheny.db.adapter.ethereum; +import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collections; @@ -151,7 +152,7 @@ private Transaction getTransaction() { } - void writeToStore( String tableName, List> logResults ) { + void writeToStore( String tableName, List> logResults, int targetAdapterId ) { if ( logResults.isEmpty() ) { return; } @@ -162,13 +163,13 @@ void writeToStore( String tableName, List> logResults ) { // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); AlgOptSchema algOptSchema = transaction.getCatalogReader(); - AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + tableName ) ); + AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + "__" + targetAdapterId + "__" + tableName ) ); AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); builder.push( LogicalValues.createOneRow( builder.getCluster() ) ); builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ), rowType.getFieldNames() ); builder.insert( (AlgOptTable) table ); - // todo: we should re-use this for all batches (ignore right now); David will do this + // todo DL: we should re-use this for all batches (ignore right now) AlgNode node = builder.build(); // Construct the algebraic node AlgRoot root = AlgRoot.of( node, Kind.INSERT ); // Wrap the node into an AlgRoot as required by Polypheny @@ -189,7 +190,7 @@ void writeToStore( String tableName, List> logResults ) { if ( value instanceof Address ) { value = value.toString(); } else if ( value instanceof Uint256 ) { - value = ((Uint256) value).getValue() == null ? null : ((Uint256) value).getValue().longValue(); + value = ((Uint256) value).getValue() == null ? 
null : new BigDecimal(((Uint256) value).getValue());
 }
 fieldValues.add( value );
 }

From 661d9d4be6d391720a9d356db2f9098b4534fe1e Mon Sep 17 00:00:00 2001
From: Tunc Polat
Date: Mon, 28 Aug 2023 19:21:11 +0200
Subject: [PATCH 15/22] Fix original bug concerning blocks and transactions
 when adding two adapters

---
 .../adapter/ethereum/EthereumDataSource.java | 2 +
 .../db/adapter/ethereum/EthereumMapper.java | 1 -
 .../db/adapter/ethereum/EthereumSchema.java | 26 +++--
 .../db/adapter/ethereum/EthereumTable.java | 106 +++++++++++++-----
 4 files changed, 100 insertions(+), 35 deletions(-)

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index 0a267afd0a..77fa732ee2 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -78,6 +78,7 @@ public class EthereumDataSource extends DataSource {

 public static final String SCHEMA_NAME = "public";

+ @Getter
 private final boolean eventDataRetrieval;
 private String clientURL;
 @Getter
@@ -339,6 +340,7 @@ private void createExportedColumnsForEvents( Map> m
 String eventName = event.getString( "name" ); // to match it later with catalogTable.name
 String compositeKey = contractName + "_" + eventName; // e.g. Uni_Transfer & Dai_Transfer
 JSONArray abiInputs = event.getJSONArray( "inputs" ); // indexed and non-indexed values (topics + data)
+
 eventDataMap.put( compositeKey.toLowerCase(), new EventData( eventName, contractName, address, abiInputs ) );
 }
 }
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java
index 32be5981da..bb76b9bce4 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumMapper.java
@@ -108,5 +108,4 @@ public BlockReader makeReader( String clientUrl, int blocks, Predicate i ).toArray();
- EthereumMapper mapper = catalogTable.name.equals( "block" ) ? EthereumMapper.BLOCK : catalogTable.name.equals( "transaction" ) ? EthereumMapper.TRANSACTION : EthereumMapper.EVENTDATA; // Event Data; add EVENTDATA
+ EthereumMapper mapper = catalogTable.name.startsWith( "block" ) ? EthereumMapper.BLOCK : catalogTable.name.startsWith( "transaction" ) ? EthereumMapper.TRANSACTION : EthereumMapper.EVENTDATA; // each table will get one EthereumTable; send event metadata down here.
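The refactoring below replaces the eleven-argument EthereumTable constructor with a builder, so block and transaction tables can simply omit the four event-only fields. Put together, construction then reads roughly as follows (a sketch assumed to live in the plugin's own package; the wrapper method and its parameter list are illustrative, the Builder calls follow the diff):

import java.math.BigInteger;
import java.util.List;
import org.polypheny.db.algebra.type.AlgProtoDataType;
import org.web3j.abi.datatypes.Event;

class EthereumTableFactory {

    // Event-specific values stay null for plain block/transaction tables;
    // the builder only sets them when event data retrieval is enabled.
    static EthereumTable forEvent( String clientUrl, AlgProtoDataType protoRowType, List<EthereumFieldType> fieldTypes,
            int[] fields, EthereumMapper mapper, EthereumDataSource source, Long tableId,
            String contractAddress, BigInteger fromBlock, BigInteger toBlock, Event event ) {
        return new EthereumTable.Builder( clientUrl, protoRowType, fieldTypes, fields, mapper, source, tableId )
                .contractAddress( contractAddress )
                .fromBlock( fromBlock )
                .toBlock( toBlock )
                .event( event )
                .build();
    }

}

A builder also avoids the telescoping-constructor problem: callers no longer pass four trailing nulls just to build a block or transaction table.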
- EthereumTable table = new EthereumTable( + EthereumTable.Builder tableBuilder = new EthereumTable.Builder( clientUrl, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, mapper, ethereumDataSource, - catalogTable.id, - ethereumDataSource.getSmartContractAddressFromCatalogTable(catalogTable.name), - ethereumDataSource.getFromBlock(), - ethereumDataSource.getToBlock(), - ethereumDataSource.getEventFromCatalogTable(catalogTable.name) + catalogTable.id ); + + log.warn( catalogTable.name ); + log.warn( catalogTable.getNamespaceName() ); + log.warn( catalogTable.getDatabaseName() ); + log.warn( catalogTable.getOwnerName() ); + Boolean eventDataRetrieval = false; //ethereumDataSource.getEventDataRetrieval(); + if (eventDataRetrieval) { + tableBuilder + .contractAddress(ethereumDataSource.getSmartContractAddressFromCatalogTable(catalogTable.name)) + .fromBlock(ethereumDataSource.getFromBlock()) + .toBlock(ethereumDataSource.getToBlock()) + .event(ethereumDataSource.getEventFromCatalogTable(catalogTable.name)); + } + EthereumTable table = tableBuilder.build(); tableMap.put( catalogTable.name, table ); return table; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java index 7b3ffed976..8186fdc038 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java @@ -49,30 +49,82 @@ public class EthereumTable extends AbstractTable implements FilterableTable { protected final Event event; - public EthereumTable( - String clientUrl, - AlgProtoDataType protoRowType, - List fieldTypes, - int[] fields, - EthereumMapper mapper, - EthereumDataSource ethereumDataSource, - Long tableId, - String contractAddress, - BigInteger fromBlock, - BigInteger toBlock, - Event event ) { - - this.clientUrl = clientUrl; - this.protoRowType = protoRowType; - this.fieldTypes = fieldTypes; - this.fields = fields; - this.ethereumDataSource = ethereumDataSource; - this.mapper = mapper; - this.tableId = tableId; - this.contractAddress = contractAddress; - this.fromBlock = fromBlock; - this.toBlock = toBlock; - this.event = event; + public EthereumTable( Builder builder ) { + this.clientUrl = builder.clientUrl; + this.protoRowType = builder.protoRowType; + this.fieldTypes = builder.fieldTypes; + this.fields = builder.fields; + this.ethereumDataSource = builder.ethereumDataSource; + this.mapper = builder.mapper; + this.tableId = builder.tableId; + this.contractAddress = builder.contractAddress; + this.fromBlock = builder.fromBlock; + this.toBlock = builder.toBlock; + this.event = builder.event; + } + + + public static class Builder { + + protected final String clientUrl; + protected final AlgProtoDataType protoRowType; + protected final int[] fields; + protected final EthereumDataSource ethereumDataSource; + protected final EthereumMapper mapper; + protected List fieldTypes; + protected Long tableId; + + private String contractAddress = null; + private BigInteger fromBlock = null; + private BigInteger toBlock = null; + private Event event = null; + + + public Builder( String clientUrl, + AlgProtoDataType protoRowType, + List fieldTypes, + int[] fields, + EthereumMapper mapper, + EthereumDataSource ethereumDataSource, + Long tableId ) { + this.clientUrl = clientUrl; + this.protoRowType = protoRowType; + this.fieldTypes = fieldTypes; + 
this.fields = fields; + this.ethereumDataSource = ethereumDataSource; + this.mapper = mapper; + this.tableId = tableId; + } + + + public Builder contractAddress( String val ) { + this.contractAddress = val; + return this; + } + + + public Builder fromBlock( BigInteger val ) { + this.fromBlock = val; + return this; + } + + + public Builder toBlock( BigInteger val ) { + this.toBlock = val; + return this; + } + + + public Builder event( Event val ) { + this.event = val; + return this; + } + + + public EthereumTable build() { + return new EthereumTable( this ); + } + } @@ -117,11 +169,11 @@ public Enumerator enumerator() { null, mapper, finalBlockNumberPredicate, - (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ) , + (EthereumEnumerator.RowConverter) EthereumEnumerator.converter( fieldTypes, fields ), contractAddress, fromBlock, toBlock, - event); + event ); } }; } @@ -140,7 +192,7 @@ public Enumerator enumerator() { contractAddress, fromBlock, toBlock, - event); + event ); } }; } From f8d198cf558198eadad41db0917dcf8ff001709a Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Mon, 28 Aug 2023 20:27:38 +0200 Subject: [PATCH 16/22] Fix bug adding two adapters --- .../db/adapter/ethereum/EthereumSchema.java | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java index daca074fd5..df36745f04 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java @@ -73,24 +73,32 @@ public Table createBlockchainTable( CatalogTable catalogTable, List getTableMap() { From 5bacf82c0cec16ebd86649caa7dcd2cb652bf1c2 Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Tue, 29 Aug 2023 03:10:48 +0200 Subject: [PATCH 17/22] Display error messages and enable multiple caching status for adapters --- .../db/adapter/ethereum/CacheException.java | 30 +++++++++++++ .../db/adapter/ethereum/CachingStatus.java | 5 ++- .../db/adapter/ethereum/ContractCache.java | 24 +++++++++- .../adapter/ethereum/EthereumDataSource.java | 44 ++++++++++++++++--- .../db/adapter/ethereum/EventCache.java | 9 +--- 5 files changed, 97 insertions(+), 15 deletions(-) create mode 100644 plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java new file mode 100644 index 0000000000..1d11e15c30 --- /dev/null +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CacheException.java @@ -0,0 +1,30 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.ethereum; + +public class CacheException extends RuntimeException { + + public CacheException( String message ) { + super( message ); + } + + + public CacheException( String message, Throwable cause ) { + super( message, cause ); + } + +} diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java index 63a3bab219..b22ac6f4fc 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java @@ -27,11 +27,12 @@ public class CachingStatus { public BigInteger toBlock; public BigInteger currentBlock; public BigInteger currentEndBlock; - + public int sourceAdapterId; + public String errorMessage; public enum ProcessingState { - INITIALIZED, PROCESSING, DONE + INITIALIZED, PROCESSING, DONE, ERROR } } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java index cff8721e7a..914bf61c97 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java @@ -25,6 +25,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataSource.ExportedColumn; +import org.polypheny.db.adapter.ethereum.CachingStatus.ProcessingState; import org.polypheny.db.ddl.DdlManager.FieldInformation; import org.web3j.abi.EventEncoder; import org.web3j.abi.FunctionReturnDecoder; @@ -48,6 +49,9 @@ public class ContractCache { private final BigInteger fromBlock; private final BigInteger toBlock; private BigInteger currentBlock; + private boolean hasError = false; + private String errorMessage; + private final Map cache = new ConcurrentHashMap<>(); // a cache for each event private final Map> eventsPerContract; @@ -107,7 +111,19 @@ public void startCaching() { for ( Map.Entry entry : cache.entrySet() ) { String address = entry.getKey(); EventCache eventCache = entry.getValue(); - eventCache.addToCache( address, currentBlock, endBlock, targetAdapterId ); + try { + eventCache.addToCache(address, currentBlock, endBlock, targetAdapterId); + } catch (CacheException e) { + log.error("Error occurred while adding to cache: " + e.getMessage()); + hasError = true; + errorMessage = e.getMessage(); + throw e; + } catch (Throwable t) { + log.error("Unexpected error during caching: " + t.getMessage(), t); + hasError = true; + errorMessage = t.getMessage(); + return; + } } currentBlock = endBlock.add( BigInteger.ONE ); // avoid overlapping block numbers @@ -122,6 +138,7 @@ public CachingStatus getStatus() { status.toBlock = toBlock; status.currentBlock = currentBlock; status.currentEndBlock = currentBlock.add(BigInteger.valueOf(batchSizeInBlocks)); + status.sourceAdapterId = sourceAdapterId; if ( currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ).compareTo( toBlock ) > 0 ) { status.percent = 100; @@ -139,6 +156,11 @@ public CachingStatus getStatus() { } } + if (hasError) { + status.state = ProcessingState.ERROR; + status.errorMessage = errorMessage; + } + return status; } diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java 
b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java index 77fa732ee2..c791ce3644 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java @@ -75,6 +75,9 @@ @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true) @AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true) @AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true) // todo DL: list +@AdapterSettingBoolean(name = "UseManualABI", description = "Cache event data", defaultValue = false, position = 12, modifiable = true) +@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true) +@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true) public class EthereumDataSource extends DataSource { public static final String SCHEMA_NAME = "public"; @@ -101,6 +104,10 @@ public class EthereumDataSource extends DataSource { private Map> map; + private final boolean useManualABI; + private final String contractABI; + private final String contractName; + public EthereumDataSource( final int storeId, final String uniqueName, final Map settings ) { super( storeId, uniqueName, settings, true ); @@ -120,6 +127,9 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map this.eventDataMap = new HashMap<>(); this.caching = Boolean.parseBoolean( settings.get( "Caching" ) ); this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" ); + this.useManualABI = Boolean.parseBoolean( settings.get( "UseManualABI" ) ); + this.contractABI = settings.get( "ContractABI" ); + this.contractName = settings.get( "ContractName" ); // todo DL new Thread( () -> { createInformationPage(); @@ -326,11 +336,20 @@ private void createExportedColumnsForEvents( Map> m for ( String address : smartContractAddresses ) { String contractName = null; List contractEvents = null; - try { - contractName = callWithExponentialBackoff( () -> getContractName( address ) ); - contractEvents = callWithExponentialBackoff( () -> getEventsFromABI( etherscanApiKey, address ) ); - } catch ( Exception e ) { - throw new RuntimeException( e ); + if ( useManualABI == true && !contractABI.isEmpty() && !this.contractName.isEmpty() ) { + if (smartContractAddresses.size() > 1) { + throw new IllegalArgumentException("Only one smart contract address should be provided when using a manual ABI."); + } + JSONArray abiArray = new JSONArray(contractABI); + contractEvents = getEventsFromABIArray(abiArray); + contractName = this.contractName; + } else { + try { + contractName = callWithExponentialBackoff( () -> getContractName( address ) ); + contractEvents = callWithExponentialBackoff( () -> getEventsFromABI( etherscanApiKey, address ) ); + } catch ( Exception e ) { + throw new RuntimeException( e ); + } } for ( JSONObject event : contractEvents ) { @@ -451,6 +470,21 @@ protected List getEventsFromABI( String etherscanApiKey, String cont return events; } + protected List getEventsFromABIArray(JSONArray abiArray) { + List events = new ArrayList<>(); + + // Loop through the ABI + for 
(int i = 0; i < abiArray.length(); i++) { + JSONObject item = abiArray.getJSONObject(i); + + // Check if the item is of type 'event' + if (item.has("type") && "event".equals(item.getString("type"))) { + events.add(item); + } + } + + return events; + } private String getContractName( String contractAddress ) { try { diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java index b26839a408..d77ccd7090 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java @@ -83,22 +83,17 @@ private void addLogsToCache( String address, EventData eventData, BigInteger sta try { EthLog ethLog = web3j.ethGetLogs( filter ).send(); // Get the EthLog response - // todo: show on screen and update - /*if ( startBlock.equals( BigInteger.valueOf( 17669096 ) ) ) { - throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock ); // just start new caching from startBlock - }*/ - if ( ethLog.hasError() ) { Response.Error error = ethLog.getError(); log.error( "Error fetching logs: " + error.getMessage() ); - throw new RuntimeException( "Error fetching logs for block range: " + startBlock + " to " + endBlock + ". Message: " + error.getMessage() ); // just start new caching from startBlock + throw new CacheException( "Error occurred while fetching logs for block range: " + startBlock + " to " + endBlock + ". Please retry starting from block " + startBlock + " and continue to your intended final block. Error Message: " + error.getMessage() ); } List rawLogs = ethLog.getLogs(); List> structuredLogs = normalizeLogs( event, rawLogs ); cache.put( eventData, structuredLogs ); } catch ( IOException e ) { - throw new RuntimeException( "IO Error fetching logs", e ); + throw new CacheException( "IO Error fetching logs", e ); } } From 71971a82b65357d0765cabb712ca743e7ed23b69 Mon Sep 17 00:00:00 2001 From: Tunc Polat Date: Tue, 29 Aug 2023 20:28:30 +0200 Subject: [PATCH 18/22] Add all conversion cases --- .../db/adapter/ethereum/ContractCache.java | 11 - .../adapter/ethereum/EthereumDataSource.java | 49 +- .../adapter/ethereum/EventCacheManager.java | 35 +- .../db/adapter/ethereum/EventData.java | 471 +++++++++++++++++- 4 files changed, 508 insertions(+), 58 deletions(-) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java index 914bf61c97..e699a49428 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java @@ -16,9 +16,7 @@ package org.polypheny.db.adapter.ethereum; -import java.io.IOException; import java.math.BigInteger; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -27,16 +25,7 @@ import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.ethereum.CachingStatus.ProcessingState; import org.polypheny.db.ddl.DdlManager.FieldInformation; -import org.web3j.abi.EventEncoder; -import org.web3j.abi.FunctionReturnDecoder; -import org.web3j.abi.TypeReference; -import org.web3j.abi.datatypes.Event; -import org.web3j.abi.datatypes.Type; 
import org.web3j.protocol.Web3j; -import org.web3j.protocol.core.DefaultBlockParameter; -import org.web3j.protocol.core.methods.request.EthFilter; -import org.web3j.protocol.core.methods.response.EthLog; -import org.web3j.protocol.core.methods.response.Log; import org.web3j.protocol.http.HttpService; @Slf4j diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java index c791ce3644..572a0282e4 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java @@ -74,10 +74,10 @@ @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 8, modifiable = true) @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true) @AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true) -@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true) // todo DL: list +@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true) @AdapterSettingBoolean(name = "UseManualABI", description = "Cache event data", defaultValue = false, position = 12, modifiable = true) -@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true) -@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true) +@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true, required = false) +@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true, required = false) public class EthereumDataSource extends DataSource { public static final String SCHEMA_NAME = "public"; @@ -337,11 +337,11 @@ private void createExportedColumnsForEvents( Map> m String contractName = null; List contractEvents = null; if ( useManualABI == true && !contractABI.isEmpty() && !this.contractName.isEmpty() ) { - if (smartContractAddresses.size() > 1) { - throw new IllegalArgumentException("Only one smart contract address should be provided when using a manual ABI."); + if ( smartContractAddresses.size() > 1 ) { + throw new IllegalArgumentException( "Only one smart contract address should be provided when using a manual ABI." 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index c791ce3644..572a0282e4 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -74,10 +74,10 @@
 @AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 8, modifiable = true)
 @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true)
 @AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true)
-@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true) // todo DL: list
+@AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true)
 @AdapterSettingBoolean(name = "UseManualABI", description = "Cache event data", defaultValue = false, position = 12, modifiable = true)
-@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true)
-@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true)
+@AdapterSettingString(name = "ContractABI", description = "Contract ABI", defaultValue = "", position = 13, modifiable = true, required = false)
+@AdapterSettingString(name = "ContractName", description = "Contract name", defaultValue = "", position = 14, modifiable = true, required = false)
 public class EthereumDataSource extends DataSource {

     public static final String SCHEMA_NAME = "public";
@@ -337,11 +337,11 @@ private void createExportedColumnsForEvents( Map<String, List<ExportedColumn>> m
         String contractName = null;
         List<JSONObject> contractEvents = null;
         if ( useManualABI == true && !contractABI.isEmpty() && !this.contractName.isEmpty() ) {
-            if (smartContractAddresses.size() > 1) {
-                throw new IllegalArgumentException("Only one smart contract address should be provided when using a manual ABI.");
+            if ( smartContractAddresses.size() > 1 ) {
+                throw new IllegalArgumentException( "Only one smart contract address should be provided when using a manual ABI." );
             }
-            JSONArray abiArray = new JSONArray(contractABI);
-            contractEvents = getEventsFromABIArray(abiArray);
+            JSONArray abiArray = new JSONArray( contractABI );
+            contractEvents = getEventsFromABIArray( abiArray );
             contractName = this.contractName;
         } else {
             try {
@@ -470,22 +470,24 @@ protected List<JSONObject> getEventsFromABI( String etherscanApiKey, String cont
         return events;
     }

-    protected List<JSONObject> getEventsFromABIArray(JSONArray abiArray) {
+
+    protected List<JSONObject> getEventsFromABIArray( JSONArray abiArray ) {
         List<JSONObject> events = new ArrayList<>();

         // Loop through the ABI
-        for (int i = 0; i < abiArray.length(); i++) {
-            JSONObject item = abiArray.getJSONObject(i);
+        for ( int i = 0; i < abiArray.length(); i++ ) {
+            JSONObject item = abiArray.getJSONObject( i );

             // Check if the item is of type 'event'
-            if (item.has("type") && "event".equals(item.getString("type"))) {
-                events.add(item);
+            if ( item.has( "type" ) && "event".equals( item.getString( "type" ) ) ) {
+                events.add( item );
             }
         }

         return events;
     }

+
     private String getContractName( String contractAddress ) {
         try {
             URL url = new URL( "https://api.etherscan.io/api?module=contract&action=getsourcecode&address=" + contractAddress + "&apikey=" + etherscanApiKey );
@@ -545,6 +547,8 @@ private Integer getLengthForType( PolyType type ) {
         switch ( type ) {
             case VARCHAR:
                 return 300;
+            case VARBINARY:
+                return 32;
             default:
                 return null;
         }
@@ -552,17 +556,16 @@ private Integer getLengthForType( PolyType type ) {


     static PolyType convertToPolyType( String type ) {
-        // todo: convert all types in evm to polytype
-        switch ( type ) {
-            case "bool":
-                return PolyType.BOOLEAN;
-            case "address":
-                return PolyType.VARCHAR;
-            case "int": // 8 to 256...
-            case "uint256":
-                return PolyType.DECIMAL; // todo
-            default:
-                return null;
+        if ( type.equals( "bool" ) ) {
+            return PolyType.BOOLEAN;
+        } else if ( type.equals( "address" ) || type.equals( "string" ) ) {
+            return PolyType.VARCHAR;
+        } else if ( type.startsWith( "int" ) || type.startsWith( "uint" ) ) {
+            return PolyType.DECIMAL;
+        } else if ( type.equals( "bytes" ) || type.startsWith( "bytes" ) ) {
+            return PolyType.VARCHAR; // for dynamic and fixed-size
+        } else {
+            return null;
         }
     }
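
A hedged illustration of the mapping convertToPolyType now implements (expected results read off
the branches above; the standalone calls are illustrative only):

    convertToPolyType( "bool" );     // PolyType.BOOLEAN
    convertToPolyType( "address" );  // PolyType.VARCHAR
    convertToPolyType( "string" );   // PolyType.VARCHAR
    convertToPolyType( "uint128" );  // PolyType.DECIMAL (startsWith "uint")
    convertToPolyType( "int8" );     // PolyType.DECIMAL (startsWith "int")
    convertToPolyType( "bytes32" );  // PolyType.VARCHAR (fixed-size byte array)
    convertToPolyType( "tuple" );    // null at this stage of the series
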
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 6ca54efd36..c9d15b7f09 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -61,7 +61,14 @@
 import org.polypheny.db.transaction.TransactionException;
 import org.polypheny.db.transaction.TransactionManager;
 import org.web3j.abi.datatypes.Address;
+import org.web3j.abi.datatypes.Bool;
+import org.web3j.abi.datatypes.Bytes;
+import org.web3j.abi.datatypes.DynamicBytes;
+import org.web3j.abi.datatypes.Int;
+import org.web3j.abi.datatypes.Uint;
+import org.web3j.abi.datatypes.generated.Bytes32;
 import org.web3j.abi.datatypes.generated.Uint256;
+import org.web3j.utils.Numeric;


 @Slf4j
@@ -184,14 +191,7 @@ void writeToStore( String tableName, List<List<Object>> logResults, int targetAd
             List<Object> fieldValues = new ArrayList<>();
             for ( List<Object> logResult : logResults ) {
                 Object value = logResult.get( i );
-                // todo: converting to long (-2^63-1 till 2^63-1) from uint256 (2^256-1) if data is greater than 2^63-1
-                // how to convert it to bigint? Is there a Poltype that can handle unit256? Double? Evtl. Decimal?
-                // is Bigint 64-bit signed integer?
-                if ( value instanceof Address ) {
-                    value = value.toString();
-                } else if ( value instanceof Uint256 ) {
-                    value = ((Uint256) value).getValue() == null ? null : new BigDecimal(((Uint256) value).getValue());
-                }
+                value = convertValueBasedOnType( value );
                 fieldValues.add( value );
             }
             i++;
@@ -217,6 +217,25 @@ protected Map<Integer, CachingStatus> getAllStreamStatus() {
         return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, ContractCache::getStatus ) );
     }

+    private Object convertValueBasedOnType( Object value ) {
+        if ( value instanceof Address ) {
+            return value.toString();
+        } else if ( value instanceof Bool ) {
+            return ((Bool) value).getValue();
+        } else if ( value instanceof DynamicBytes ) {
+            return ((DynamicBytes) value).getValue().toString();
+        } else if ( value instanceof Bytes ) {
+            return value.toString();
+        } else if ( value instanceof Uint ) { // covers Uint and all its generated subclasses
+            BigInteger bigIntValue = ((Uint) value).getValue();
+            return bigIntValue == null ? null : new BigDecimal( bigIntValue );
+        } else if ( value instanceof Int ) { // covers Int and all its generated subclasses
+            BigInteger bigIntValue = ((Int) value).getValue();
+            return bigIntValue == null ? null : new BigDecimal( bigIntValue );
+        }
+        return value; // return the original value if none of the conditions match
+    }
+

     @Override
     public void run() {
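
A hedged note on the Uint/Int branches above, answering the questions in the comments this patch
removes: uint256 ranges over 0 .. 2^256-1, while BIGINT is a 64-bit signed integer, so narrowing
to long would overflow for large values; java.math.BigDecimal, surfaced as PolyType.DECIMAL,
preserves the full range. The lossless path in isolation (variable names are illustrative):

    BigInteger raw = ((Uint256) value).getValue();                  // full 256-bit magnitude
    Object converted = raw == null ? null : new BigDecimal( raw );  // exact, no truncation
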
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
index 793b8332f9..c0af0864ed 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
@@ -16,26 +16,119 @@

 package org.polypheny.db.adapter.ethereum;

-import static org.web3j.abi.Utils.convert;
-
 import java.util.ArrayList;
 import java.util.List;
-import java.util.stream.Collectors;
 import lombok.Getter;
 import org.json.JSONArray;
 import org.json.JSONObject;
 import org.web3j.abi.TypeReference;
 import org.web3j.abi.datatypes.Address;
+import org.web3j.abi.datatypes.Bool;
+import org.web3j.abi.datatypes.DynamicBytes;
 import org.web3j.abi.datatypes.Event;
-import org.web3j.abi.datatypes.Type;
+import org.web3j.abi.datatypes.Utf8String;
+import org.web3j.abi.datatypes.generated.Uint8;
+import org.web3j.abi.datatypes.generated.Uint16;
+import org.web3j.abi.datatypes.generated.Uint24;
+import org.web3j.abi.datatypes.generated.Uint32;
+import org.web3j.abi.datatypes.generated.Uint40;
+import org.web3j.abi.datatypes.generated.Uint48;
+import org.web3j.abi.datatypes.generated.Uint56;
+import org.web3j.abi.datatypes.generated.Uint64;
+import org.web3j.abi.datatypes.generated.Uint72;
+import org.web3j.abi.datatypes.generated.Uint80;
+import org.web3j.abi.datatypes.generated.Uint88;
+import org.web3j.abi.datatypes.generated.Uint96;
+import org.web3j.abi.datatypes.generated.Uint104;
+import org.web3j.abi.datatypes.generated.Uint112;
+import org.web3j.abi.datatypes.generated.Uint120;
+import org.web3j.abi.datatypes.generated.Uint128;
+import org.web3j.abi.datatypes.generated.Uint136;
+import org.web3j.abi.datatypes.generated.Uint144;
+import org.web3j.abi.datatypes.generated.Uint152;
+import org.web3j.abi.datatypes.generated.Uint160;
+import org.web3j.abi.datatypes.generated.Uint168;
+import org.web3j.abi.datatypes.generated.Uint176;
+import org.web3j.abi.datatypes.generated.Uint184;
+import org.web3j.abi.datatypes.generated.Uint192;
+import org.web3j.abi.datatypes.generated.Uint200;
+import org.web3j.abi.datatypes.generated.Uint208;
+import org.web3j.abi.datatypes.generated.Uint216;
+import org.web3j.abi.datatypes.generated.Uint224;
+import org.web3j.abi.datatypes.generated.Uint232;
+import org.web3j.abi.datatypes.generated.Uint240;
+import org.web3j.abi.datatypes.generated.Uint248;
 import org.web3j.abi.datatypes.generated.Uint256;
+import org.web3j.abi.datatypes.generated.Int8;
+import org.web3j.abi.datatypes.generated.Int16;
+import org.web3j.abi.datatypes.generated.Int24;
+import org.web3j.abi.datatypes.generated.Int32;
+import org.web3j.abi.datatypes.generated.Int40;
+import org.web3j.abi.datatypes.generated.Int48;
+import org.web3j.abi.datatypes.generated.Int56;
+import org.web3j.abi.datatypes.generated.Int64;
+import org.web3j.abi.datatypes.generated.Int72;
+import org.web3j.abi.datatypes.generated.Int80;
+import org.web3j.abi.datatypes.generated.Int88;
+import org.web3j.abi.datatypes.generated.Int96;
+import org.web3j.abi.datatypes.generated.Int104;
+import org.web3j.abi.datatypes.generated.Int112;
+import org.web3j.abi.datatypes.generated.Int120;
+import org.web3j.abi.datatypes.generated.Int128;
+import org.web3j.abi.datatypes.generated.Int136;
+import org.web3j.abi.datatypes.generated.Int144;
+import org.web3j.abi.datatypes.generated.Int152;
+import org.web3j.abi.datatypes.generated.Int160;
+import org.web3j.abi.datatypes.generated.Int168;
+import org.web3j.abi.datatypes.generated.Int176;
+import org.web3j.abi.datatypes.generated.Int184;
+import org.web3j.abi.datatypes.generated.Int192;
+import org.web3j.abi.datatypes.generated.Int200;
+import org.web3j.abi.datatypes.generated.Int208;
+import org.web3j.abi.datatypes.generated.Int216;
+import org.web3j.abi.datatypes.generated.Int224;
+import org.web3j.abi.datatypes.generated.Int232;
+import org.web3j.abi.datatypes.generated.Int240;
+import org.web3j.abi.datatypes.generated.Int248;
+import org.web3j.abi.datatypes.generated.Int256;
+import org.web3j.abi.datatypes.generated.Bytes1;
+import org.web3j.abi.datatypes.generated.Bytes2;
+import org.web3j.abi.datatypes.generated.Bytes3;
+import org.web3j.abi.datatypes.generated.Bytes4;
+import org.web3j.abi.datatypes.generated.Bytes5;
+import org.web3j.abi.datatypes.generated.Bytes6;
+import org.web3j.abi.datatypes.generated.Bytes7;
+import org.web3j.abi.datatypes.generated.Bytes8;
+import org.web3j.abi.datatypes.generated.Bytes9;
+import org.web3j.abi.datatypes.generated.Bytes10;
+import org.web3j.abi.datatypes.generated.Bytes11;
+import org.web3j.abi.datatypes.generated.Bytes12;
+import org.web3j.abi.datatypes.generated.Bytes13;
+import org.web3j.abi.datatypes.generated.Bytes14;
+import org.web3j.abi.datatypes.generated.Bytes15;
+import org.web3j.abi.datatypes.generated.Bytes16;
+import org.web3j.abi.datatypes.generated.Bytes17;
+import org.web3j.abi.datatypes.generated.Bytes18;
+import org.web3j.abi.datatypes.generated.Bytes19;
+import org.web3j.abi.datatypes.generated.Bytes20;
+import org.web3j.abi.datatypes.generated.Bytes21;
+import org.web3j.abi.datatypes.generated.Bytes22;
+import org.web3j.abi.datatypes.generated.Bytes23;
+import org.web3j.abi.datatypes.generated.Bytes24;
+import org.web3j.abi.datatypes.generated.Bytes25;
+import org.web3j.abi.datatypes.generated.Bytes26;
+import org.web3j.abi.datatypes.generated.Bytes27;
+import org.web3j.abi.datatypes.generated.Bytes28;
+import org.web3j.abi.datatypes.generated.Bytes29;
+import org.web3j.abi.datatypes.generated.Bytes30;
+import org.web3j.abi.datatypes.generated.Bytes31;
+import org.web3j.abi.datatypes.generated.Bytes32;
+

-// TODO: extend EventData with Data
 public class EventData {

     @Getter
     private String originalKey;
-    private String lowercaseKey;
     @Getter
     private Event event;
     @Getter
@@ -48,7 +141,6 @@ public class EventData {

     public EventData( String originalKey, String contractName, String smartContractAddress, JSONArray abiInputs ) {
         this.originalKey = originalKey;
-        this.lowercaseKey = originalKey.toLowerCase();
         this.compositeName = contractName.toLowerCase() + "_" + originalKey.toLowerCase();
         this.abiInputs = abiInputs;
         List<TypeReference<?>> typeReferences = createTypeReferences( abiInputs );
@@ -63,18 +155,365 @@ private static List<TypeReference<?>> createTypeReferences( JSONArray abiInputs
             JSONObject inputObject = abiInputs.getJSONObject( i );
             String type = inputObject.getString( "type" );
             boolean indexed = inputObject.getBoolean( "indexed" );
-            if ( type.equals( "address" ) ) {
-                typeReferences.add( indexed ? new TypeReference<Address>( indexed ) {
-                } : new TypeReference<Address>( false ) {
-                } );
-                // typeReferences.add( new TypeReference<Address>( indexed ) );
-            } else if ( type.equals( "uint256" ) ) {
-                typeReferences.add( indexed ? new TypeReference<Uint256>( true ) {
-                } : new TypeReference<Uint256>( false ) {
-                } );
+
+            switch ( type ) {
+                case "address":
+                    typeReferences.add( new TypeReference<Address>( indexed ) {
+                    } );
+                    break;
+                case "bool":
+                    typeReferences.add( new TypeReference<Bool>( indexed ) {
+                    } );
+                    break;
+                case "string":
+                    typeReferences.add( new TypeReference<Utf8String>( indexed ) {
+                    } );
+                    break;
+                case "uint": // alias for uint256
+                    typeReferences.add( new TypeReference<Uint256>( indexed ) {
+                    } );
+                    break;
+                case "int": // alias for int256
+                    typeReferences.add( new TypeReference<Int256>( indexed ) {
+                    } );
+                    break;
+                case "bytes": // dynamic-sized byte array
+                    typeReferences.add( new TypeReference<DynamicBytes>( indexed ) {
+                    } );
+                    break;
+                default:
+                    if ( type.startsWith( "uint" ) ) {
+                        int bitSize = Integer.parseInt( type.substring( 4 ) ); // Get the bit size, e.g., 8 from uint8
+                        typeReferences.add( createUintTypeReference( bitSize, indexed ) );
+                    } else if ( type.startsWith( "int" ) ) {
+                        int bitSize = Integer.parseInt( type.substring( 3 ) ); // Get the bit size, e.g., 8 from int8
+                        typeReferences.add( createIntTypeReference( bitSize, indexed ) );
+                    } else if ( type.startsWith( "bytes" ) && !type.equals( "bytes" ) ) { // fixed-sized byte array
+                        int size = Integer.parseInt( type.substring( 5 ) ); // Get size, e.g., 1 from bytes1
+                        typeReferences.add( createBytesTypeReference( size, indexed ) );
+                    }
+                    break;
             }
         }

         return typeReferences;
+
     }
+
+
+    private static TypeReference<?> createUintTypeReference( int bitSize, boolean indexed ) {
+        switch ( bitSize ) {
+            case 8:
+                return new TypeReference<Uint8>( indexed ) {
+                };
+            case 16:
+                return new TypeReference<Uint16>( indexed ) {
+                };
+            case 24:
+                return new TypeReference<Uint24>( indexed ) {
+                };
+            case 32:
+                return new TypeReference<Uint32>( indexed ) {
+                };
+            case 40:
+                return new TypeReference<Uint40>( indexed ) {
+                };
+            case 48:
+                return new TypeReference<Uint48>( indexed ) {
+                };
+            case 56:
+                return new TypeReference<Uint56>( indexed ) {
+                };
+            case 64:
+                return new TypeReference<Uint64>( indexed ) {
+                };
+            case 72:
+                return new TypeReference<Uint72>( indexed ) {
+                };
+            case 80:
+                return new TypeReference<Uint80>( indexed ) {
+                };
+            case 88:
+                return new TypeReference<Uint88>( indexed ) {
+                };
+            case 96:
+                return new TypeReference<Uint96>( indexed ) {
+                };
+            case 104:
+                return new TypeReference<Uint104>( indexed ) {
+                };
+            case 112:
+                return new TypeReference<Uint112>( indexed ) {
+                };
+            case 120:
+                return new TypeReference<Uint120>( indexed ) {
+                };
+            case 128:
+                return new TypeReference<Uint128>( indexed ) {
+                };
+            case 136:
+                return new TypeReference<Uint136>( indexed ) {
+                };
+            case 144:
+                return new TypeReference<Uint144>( indexed ) {
+                };
+            case 152:
+                return new TypeReference<Uint152>( indexed ) {
+                };
+            case 160:
+                return new TypeReference<Uint160>( indexed ) {
+                };
+            case 168:
+                return new TypeReference<Uint168>( indexed ) {
+                };
+            case 176:
+                return new TypeReference<Uint176>( indexed ) {
+                };
+            case 184:
+                return new TypeReference<Uint184>( indexed ) {
+                };
+            case 192:
+                return new TypeReference<Uint192>( indexed ) {
+                };
+            case 200:
+                return new TypeReference<Uint200>( indexed ) {
+                };
+            case 208:
+                return new TypeReference<Uint208>( indexed ) {
+                };
+            case 216:
+                return new TypeReference<Uint216>( indexed ) {
+                };
+            case 224:
+                return new TypeReference<Uint224>( indexed ) {
+                };
+            case 232:
+                return new TypeReference<Uint232>( indexed ) {
+                };
+            case 240:
+                return new TypeReference<Uint240>( indexed ) {
+                };
+            case 248:
+                return new TypeReference<Uint248>( indexed ) {
+                };
+            case 256:
+                return new TypeReference<Uint256>( indexed ) {
+                };
+            default:
+                throw new IllegalArgumentException( "Unsupported bit size: " + bitSize );
+        }
+    }
+
+
+    private static TypeReference<?> createIntTypeReference( int bitSize, boolean indexed ) {
+        switch ( bitSize ) {
+            case 8:
+                return new TypeReference<Int8>( indexed ) {
+                };
+            case 16:
+                return new TypeReference<Int16>( indexed ) {
+                };
+            case 24:
+                return new TypeReference<Int24>( indexed ) {
+                };
+            case 32:
+                return new TypeReference<Int32>( indexed ) {
+                };
+            case 40:
+                return new TypeReference<Int40>( indexed ) {
+                };
+            case 48:
+                return new TypeReference<Int48>( indexed ) {
+                };
+            case 56:
+                return new TypeReference<Int56>( indexed ) {
+                };
+            case 64:
+                return new TypeReference<Int64>( indexed ) {
+                };
+            case 72:
+                return new TypeReference<Int72>( indexed ) {
+                };
+            case 80:
+                return new TypeReference<Int80>( indexed ) {
+                };
+            case 88:
+                return new TypeReference<Int88>( indexed ) {
+                };
+            case 96:
+                return new TypeReference<Int96>( indexed ) {
+                };
+            case 104:
+                return new TypeReference<Int104>( indexed ) {
+                };
+            case 112:
+                return new TypeReference<Int112>( indexed ) {
+                };
+            case 120:
+                return new TypeReference<Int120>( indexed ) {
+                };
+            case 128:
+                return new TypeReference<Int128>( indexed ) {
+                };
+            case 136:
+                return new TypeReference<Int136>( indexed ) {
+                };
+            case 144:
+                return new TypeReference<Int144>( indexed ) {
+                };
+            case 152:
+                return new TypeReference<Int152>( indexed ) {
+                };
+            case 160:
+                return new TypeReference<Int160>( indexed ) {
+                };
+            case 168:
+                return new TypeReference<Int168>( indexed ) {
+                };
+            case 176:
+                return new TypeReference<Int176>( indexed ) {
+                };
+            case 184:
+                return new TypeReference<Int184>( indexed ) {
+                };
+            case 192:
+                return new TypeReference<Int192>( indexed ) {
+                };
+            case 200:
+                return new TypeReference<Int200>( indexed ) {
+                };
+            case 208:
+                return new TypeReference<Int208>( indexed ) {
+                };
+            case 216:
+                return new TypeReference<Int216>( indexed ) {
+                };
+            case 224:
+                return new TypeReference<Int224>( indexed ) {
+                };
+            case 232:
+                return new TypeReference<Int232>( indexed ) {
+                };
+            case 240:
+                return new TypeReference<Int240>( indexed ) {
+                };
+            case 248:
+                return new TypeReference<Int248>( indexed ) {
+                };
+            case 256:
+                return new TypeReference<Int256>( indexed ) {
+                };
+            default:
+                throw new IllegalArgumentException( "Unsupported bit size: " + bitSize );
+        }
+    }
+
+
+    private static TypeReference<?> createBytesTypeReference( int size, boolean indexed ) {
+        switch ( size ) {
+            case 1:
+                return new TypeReference<Bytes1>( indexed ) {
+                };
+            case 2:
+                return new TypeReference<Bytes2>( indexed ) {
+                };
+            case 3:
+                return new TypeReference<Bytes3>( indexed ) {
+                };
+            case 4:
+                return new TypeReference<Bytes4>( indexed ) {
+                };
+            case 5:
+                return new TypeReference<Bytes5>( indexed ) {
+                };
+            case 6:
+                return new TypeReference<Bytes6>( indexed ) {
+                };
+            case 7:
+                return new TypeReference<Bytes7>( indexed ) {
+                };
+            case 8:
+                return new TypeReference<Bytes8>( indexed ) {
+                };
+            case 9:
+                return new TypeReference<Bytes9>( indexed ) {
+                };
+            case 10:
+                return new TypeReference<Bytes10>( indexed ) {
+                };
+            case 11:
+                return new TypeReference<Bytes11>( indexed ) {
+                };
+            case 12:
+                return new TypeReference<Bytes12>( indexed ) {
+                };
+            case 13:
+                return new TypeReference<Bytes13>( indexed ) {
+                };
+            case 14:
+                return new TypeReference<Bytes14>( indexed ) {
+                };
+            case 15:
+                return new TypeReference<Bytes15>( indexed ) {
+                };
+            case 16:
+                return new TypeReference<Bytes16>( indexed ) {
+                };
+            case 17:
+                return new TypeReference<Bytes17>( indexed ) {
+                };
+            case 18:
+                return new TypeReference<Bytes18>( indexed ) {
+                };
+            case 19:
+                return new TypeReference<Bytes19>( indexed ) {
+                };
+            case 20:
+                return new TypeReference<Bytes20>( indexed ) {
+                };
+            case 21:
+                return new TypeReference<Bytes21>( indexed ) {
+                };
+            case 22:
+                return new TypeReference<Bytes22>( indexed ) {
+                };
+            case 23:
+                return new TypeReference<Bytes23>( indexed ) {
+                };
+            case 24:
+                return new TypeReference<Bytes24>( indexed ) {
+                };
+            case 25:
+                return new TypeReference<Bytes25>( indexed ) {
+                };
+            case 26:
+                return new TypeReference<Bytes26>( indexed ) {
+                };
+            case 27:
+                return new TypeReference<Bytes27>( indexed ) {
+                };
+            case 28:
+                return new TypeReference<Bytes28>( indexed ) {
+                };
+            case 29:
+                return new TypeReference<Bytes29>( indexed ) {
+                };
+            case 30:
+                return new TypeReference<Bytes30>( indexed ) {
+                };
+            case 31:
+                return new TypeReference<Bytes31>( indexed ) {
+                };
+            case 32:
+                return new TypeReference<Bytes32>( indexed ) {
+                };
+            default:
+                throw new IllegalArgumentException( "Size not supported for Bytes type." );
+        }
+    }

 }
+
+

From d19f16f1af772a5437ef2dd4d9539d39fc700629 Mon Sep 17 00:00:00 2001
From: datomo
Date: Fri, 1 Sep 2023 17:03:55 +0200
Subject: [PATCH 19/22] added caching rule, not yet working

---
 .../org/polypheny/db/adapter/Adapter.java     |  2 +
 .../db/algebra/rules/CacheSwitcherRule.java   | 54 +++++++++++++++++++
 .../org/polypheny/db/catalog/Catalog.java     |  5 +-
 .../db/catalog/entity/CatalogTable.java       | 24 +++++++--
 .../db/catalog/entity/CatalogView.java        |  2 +-
 .../java/org/polypheny/db/ddl/DdlManager.java |  4 +-
 .../db/plan/volcano/VolcanoPlanner.java       |  2 +
 .../org/polypheny/db/catalog/MockCatalog.java |  2 +-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  |  8 +--
 .../db/adapter/ethereum/ContractCache.java    |  3 +-
 .../adapter/ethereum/EthereumDataSource.java  |  4 +-
 .../db/adapter/ethereum/EthereumPlugin.java   |  3 --
 .../adapter/ethereum/EventCacheManager.java   |  7 +--
 .../org/polypheny/db/catalog/CatalogImpl.java | 45 +++++++++------
 .../org/polypheny/db/test/CatalogTest.java    | 12 ++---
 .../db/sql/language/ddl/SqlCreateTable.java   |  2 +-
 .../db/sql/map/SchemaToJsonMapperTest.java    |  2 +-
 17 files changed, 132 insertions(+), 49 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java

diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java
index 0cb0215400..41cd8b7f42 100644
--- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java
+++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java
@@ -81,6 +81,8 @@ public abstract class Adapter {
     @Getter
     private final String adapterName;

+    public boolean canCache = false;
+

     @Target(ElementType.TYPE)
     @Retention(RetentionPolicy.RUNTIME)
diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java
new file mode 100644
index 0000000000..b896b07b0a
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2019-2023 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.algebra.rules; + +import java.util.List; +import org.polypheny.db.algebra.logical.relational.LogicalScan; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.plan.AlgOptRule; +import org.polypheny.db.plan.AlgOptRuleCall; +import org.polypheny.db.plan.AlgOptRuleOperand; +import org.polypheny.db.plan.AlgOptTable; + +public class CacheSwitcherRule extends AlgOptRule { + + public static CacheSwitcherRule INSTANCE = new CacheSwitcherRule( operand( LogicalScan.class, none() ) ); + + + public CacheSwitcherRule( AlgOptRuleOperand operand ) { + super( operand ); + } + + + @Override + public void onMatch( AlgOptRuleCall call ) { + LogicalScan scan = call.alg( 0 ); + Long id = scan.getTable().getTable().getTableId(); + if ( id == null ) { + return; + } + + CatalogTable table = Catalog.getInstance().getTable( id ); + if ( !table.cached ) { + return; + } + String adjustedName = Catalog.HIDDEN_PREFIX + table.name; + AlgOptTable cache = scan.getTable().getRelOptSchema().getTableForMember( List.of( table.getNamespaceName(), table.getNamespaceName(), adjustedName ) ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/Catalog.java b/core/src/main/java/org/polypheny/db/catalog/Catalog.java index 3bb729294f..fe6ea7fa5d 100644 --- a/core/src/main/java/org/polypheny/db/catalog/Catalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/Catalog.java @@ -98,6 +98,7 @@ public abstract class Catalog implements ExtensionPoint { + public static final String HIDDEN_PREFIX = "__hidden__"; public static Adapter defaultStore; public static Adapter defaultSource; public static int defaultUserId = 0; @@ -490,9 +491,11 @@ protected final boolean isValidIdentifier( final String str ) { * @param ownerId The if of the owner * @param entityType The table type * @param modifiable Whether the content of the table can be modified + * @param cached + * @param hidden * @return The id of the inserted table */ - public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ); + public abstract long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden ); /** diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java index b15509935b..6a27c7960e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogTable.java @@ -53,6 +53,8 @@ public class CatalogTable implements CatalogObject, Comparable { @Getter public final ImmutableList connectedViews; + public final boolean cached; + public final boolean hidden; public CatalogTable( @@ -66,7 +68,9 @@ public CatalogTable( final Long primaryKey, @NonNull final ImmutableList dataPlacements, boolean modifiable, - PartitionProperty partitionProperty ) { + PartitionProperty partitionProperty, + boolean cached, + boolean hidden ) { this.id = id; this.name = name; this.fieldIds = fieldIds; @@ -76,6 +80,8 @@ public CatalogTable( this.entityType = type; this.primaryKey = primaryKey; this.modifiable = modifiable; + this.cached = cached; + this.hidden = hidden; this.partitionProperty = partitionProperty; this.connectedViews = ImmutableList.of(); @@ -100,7 +106,9 @@ public CatalogTable( @NonNull final ImmutableList dataPlacements, boolean modifiable, PartitionProperty 
partitionProperty, - ImmutableList connectedViews ) { + ImmutableList connectedViews, + boolean cached, + boolean hidden ) { this.id = id; this.name = name; this.fieldIds = fieldIds; @@ -116,6 +124,8 @@ public CatalogTable( this.connectedViews = connectedViews; this.dataPlacements = ImmutableList.copyOf( dataPlacements ); + this.cached = cached; + this.hidden = hidden; if ( type == EntityType.ENTITY && !modifiable ) { throw new RuntimeException( "Tables of table type TABLE must be modifiable!" ); @@ -220,7 +230,7 @@ public CatalogTable getRenamed( String newName ) { dataPlacements, modifiable, partitionProperty, - connectedViews ); + connectedViews, false, false ); } @@ -237,7 +247,9 @@ public CatalogTable getConnectedViews( ImmutableList newConnectedViews ) { dataPlacements, modifiable, partitionProperty, - newConnectedViews ); + newConnectedViews, + cached, + hidden ); } @@ -254,7 +266,9 @@ public CatalogTable getTableWithColumns( ImmutableList newColumnIds ) { dataPlacements, modifiable, partitionProperty, - connectedViews ); + connectedViews, + cached, + hidden ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java index 776018b7df..05559f0a0c 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/CatalogView.java @@ -65,7 +65,7 @@ public CatalogView( ImmutableMap> underlyingTables, String language ) { super( id, name, columnIds, schemaId, databaseId, ownerId, entityType, primaryKey, dataPlacements, - modifiable, partitionProperty, connectedViews ); + modifiable, partitionProperty, connectedViews, false, false ); this.query = query; this.algCollation = algCollation; this.underlyingTables = underlyingTables; diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 0e6741415f..a5bb969969 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -449,9 +449,11 @@ public static DdlManager getInstance() { * @param ifNotExists whether to silently ignore if the table already exists * @param stores list of data stores on which to create a full placement for this table * @param placementType which placement type should be used for the initial placements + * @param cached * @param statement the used statement + * @param hidden */ - public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; + public abstract void createTable( long schemaId, String tableName, List columns, List constraints, boolean ifNotExists, List stores, PlacementType placementType, boolean cached, Statement statement, boolean hidden ) throws EntityAlreadyExistsException, ColumnNotExistsException, UnknownPartitionTypeException, UnknownColumnException, PartitionGroupNamesNotUniqueException; /** * Create a new view diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java index e6abfe2f19..a1921368ab 100644 --- a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java +++ 
b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java @@ -71,6 +71,7 @@ import org.polypheny.db.algebra.rules.AggregateJoinTransposeRule; import org.polypheny.db.algebra.rules.AggregateProjectMergeRule; import org.polypheny.db.algebra.rules.AggregateRemoveRule; +import org.polypheny.db.algebra.rules.CacheSwitcherRule; import org.polypheny.db.algebra.rules.CalcRemoveRule; import org.polypheny.db.algebra.rules.DocumentToEnumerableRule; import org.polypheny.db.algebra.rules.FilterJoinRule; @@ -850,6 +851,7 @@ public void registerAbstractRelationalRules() { //addRule( ProjectRemoveRule.INSTANCE ); addRule( AggregateJoinTransposeRule.INSTANCE ); addRule( AggregateProjectMergeRule.INSTANCE ); + addRule( CacheSwitcherRule.INSTANCE ); addRule( CalcRemoveRule.INSTANCE ); addRule( SortRemoveRule.INSTANCE ); diff --git a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java index e44764358a..d025ef2fff 100644 --- a/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java +++ b/core/src/test/java/org/polypheny/db/catalog/MockCatalog.java @@ -348,7 +348,7 @@ public CatalogTable getTable( long databaseId, String schemaName, String tableNa @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden ) { throw new NotImplementedException(); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 3d50552f17..2a87dfb9b0 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -258,7 +258,7 @@ public void addAdapter( String uniqueName, String adapterName, AdapterType adapt tableName += i; } - long tableId = catalog.addTable( tableName, 1, 1, EntityType.SOURCE, !((DataSource) adapter).isDataReadOnly() ); + long tableId = catalog.addTable( tableName, 1, 1, EntityType.SOURCE, !((DataSource) adapter).isDataReadOnly(), adapter.canCache, false ); List primaryKeyColIds = new ArrayList<>(); int colPos = 1; String physicalSchemaName = null; @@ -2172,7 +2172,7 @@ private List getUnderlyingColumns( AlgNode algNode, AlgDataType fieldList @Override - public void createTable( long schemaId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, Statement statement ) throws EntityAlreadyExistsException { + public void createTable( long schemaId, String name, List fields, List constraints, boolean ifNotExists, List stores, PlacementType placementType, boolean cached, Statement statement, boolean hidden ) throws EntityAlreadyExistsException { name = adjustNameIfNeeded( name, schemaId ); try { @@ -2210,7 +2210,9 @@ public void createTable( long schemaId, String name, List fiel schemaId, statement.getPrepareContext().getCurrentUserId(), EntityType.ENTITY, - true ); + true, + cached, + false ); // Initially create DataPlacement containers on every store the table should be placed. 
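        // A hedged sketch (names illustrative): the Ethereum event cache added in this series
        // registers its companion tables through this method under a HIDDEN_PREFIX name, roughly:
        //     DdlManager.getInstance().createTable( namespaceId, Catalog.HIDDEN_PREFIX + "transfer",
        //             fields, constraints, false, List.of( store ), PlacementType.AUTOMATIC,
        //             false /* cached */, statement, false /* hidden */ );
        // The two new booleans land on CatalogTable.cached and CatalogTable.hidden via the
        // catalog.addTable call above, which the router later uses to redirect scans to the cache.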
stores.forEach( store -> catalog.addDataPlacement( store.getAdapterId(), tableId ) ); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java index e699a49428..9ac873289c 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java @@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.ethereum.CachingStatus.ProcessingState; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.ddl.DdlManager.FieldInformation; import org.web3j.protocol.Web3j; import org.web3j.protocol.http.HttpService; @@ -75,7 +76,7 @@ private void createSchema() { .stream() .collect( Collectors.toMap( - table -> EthereumPlugin.HIDDEN_PREFIX + "__" + targetAdapterId + "__" + table.getKey(), // we prepend this to hide the table to the user + table -> Catalog.HIDDEN_PREFIX + table.getKey(), // we prepend this to hide the table to the user table -> table.getValue() .stream() .map( ExportedColumn::toFieldInformation ) diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java index 572a0282e4..96534bd153 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java @@ -111,15 +111,15 @@ public class EthereumDataSource extends DataSource { public EthereumDataSource( final int storeId, final String uniqueName, final Map settings ) { super( storeId, uniqueName, settings, true ); + this.canCache = true; setClientURL( settings.get( "ClientUrl" ) ); this.blocks = Integer.parseInt( settings.get( "Blocks" ) ); this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) ); this.eventDataRetrieval = Boolean.parseBoolean( settings.get( "EventDataRetrieval" ) ); String smartContractAddressesStr = settings.get( "SmartContractAddresses" ); - List smartContractAddresses = Arrays.stream( smartContractAddressesStr.split( "," ) ) + this.smartContractAddresses = Arrays.stream( smartContractAddressesStr.split( "," ) ) .map( String::trim ) .collect( Collectors.toList() ); - this.smartContractAddresses = smartContractAddresses; this.etherscanApiKey = settings.get( "EtherscanApiKey" ); this.fromBlock = new BigInteger( settings.get( "fromBlock" ) ); this.toBlock = new BigInteger( settings.get( "toBlock" ) ); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java index 4efcee9f53..8a836d8888 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java @@ -18,7 +18,6 @@ import com.google.common.collect.ImmutableMap; -import org.polypheny.db.webui.HttpServer; import java.util.Map; import org.pf4j.Plugin; import org.pf4j.PluginWrapper; @@ -31,8 +30,6 @@ public class EthereumPlugin extends Plugin { public static final String ADAPTER_NAME = "ETHEREUM"; - public static 
final String HIDDEN_PREFIX = "__hidden__"; - /** * Constructor to be used by plugin manager for plugin instantiation. diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index c9d15b7f09..078ae499dc 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -66,9 +66,6 @@ import org.web3j.abi.datatypes.DynamicBytes; import org.web3j.abi.datatypes.Int; import org.web3j.abi.datatypes.Uint; -import org.web3j.abi.datatypes.generated.Bytes32; -import org.web3j.abi.datatypes.generated.Uint256; -import org.web3j.utils.Numeric; @Slf4j @@ -135,7 +132,7 @@ void createTables( int sourceAdapterId, Map> tabl // For each table, a new table is created with their constraint (e.g., a primary key). for ( Entry> table : tableInformations.entrySet() ) { ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( "log_index", "transaction_index", "block_number" ) ); - DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, transaction.createStatement() ); + DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, false, transaction.createStatement(), false ); } try { @@ -170,7 +167,7 @@ void writeToStore( String tableName, List> logResults, int targetAd // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName ); AlgOptSchema algOptSchema = transaction.getCatalogReader(); - AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( EthereumPlugin.HIDDEN_PREFIX + "__" + targetAdapterId + "__" + tableName ) ); + AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( Catalog.HIDDEN_PREFIX + tableName ) ); AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() ); builder.push( LogicalValues.createOneRow( builder.getCluster() ) ); diff --git a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java index 9b24309a30..50b41cd50e 100644 --- a/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java +++ b/plugins/mapdb-catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java @@ -790,16 +790,16 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce CatalogAdapter csv = getAdapter( "hr" ); if ( !testMode ) { if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "depts" } ) ) { - addTable( "depts", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "depts", schemaId, systemId, EntityType.SOURCE, false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emps" } ) ) { - addTable( "emps", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "emps", schemaId, systemId, EntityType.SOURCE, false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "emp" } ) ) { - addTable( "emp", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "emp", schemaId, systemId, EntityType.SOURCE, 
false, false, false ); } if ( !tableNames.containsKey( new Object[]{ databaseId, schemaId, "work" } ) ) { - addTable( "work", schemaId, systemId, EntityType.SOURCE, false ); + addTable( "work", schemaId, systemId, EntityType.SOURCE, false, false, false ); addDefaultCsvColumns( csv ); } } @@ -1285,7 +1285,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id nodes -> id, node, labels long nodesId; if ( !onlyPlacement ) { - nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + nodesId = addTable( "_nodes_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { nodesId = getTable( id, "_nodes_" ).id; } @@ -1331,7 +1331,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id nodes -> id, node, labels long nodesPropertyId; if ( !onlyPlacement ) { - nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + nodesPropertyId = addTable( "_n_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { nodesPropertyId = getTable( id, "_n_properties_" ).id; } @@ -1390,7 +1390,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id relationships -> id, rel, labels long edgesId; if ( !onlyPlacement ) { - edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + edgesId = addTable( "_edges_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { edgesId = getTable( id, "_edges_" ).id; } @@ -1505,7 +1505,7 @@ public void addGraphLogistics( long id, List stores, boolean onlyPlac // table id nodes -> id, node, labels long edgesPropertyId; if ( !onlyPlacement ) { - edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true ); + edgesPropertyId = addTable( "_properties_", id, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } else { edgesPropertyId = getTable( id, "_properties_" ).id; } @@ -1850,7 +1850,7 @@ public CatalogTable getTable( String databaseName, String schemaName, String tab * {@inheritDoc} */ @Override - public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable ) { + public long addTable( String name, long namespaceId, int ownerId, EntityType entityType, boolean modifiable, boolean cached, boolean hidden ) { long id = entityIdBuilder.getAndIncrement(); CatalogSchema schema = getSchema( namespaceId ); if ( !schema.caseSensitive ) { @@ -1884,7 +1884,9 @@ public long addTable( String name, long namespaceId, int ownerId, EntityType ent ImmutableList.of(), modifiable, partitionProperty, - ImmutableList.of() ); + ImmutableList.of(), + cached, + hidden ); updateEntityLogistics( name, namespaceId, id, schema, table ); if ( schema.namespaceType != NamespaceType.DOCUMENT ) { @@ -2202,7 +2204,9 @@ public void setTableOwner( long tableId, int ownerId ) { old.dataPlacements, old.modifiable, old.partitionProperty, - old.connectedViews ); + old.connectedViews, + old.cached, + old.hidden ); } synchronized ( this ) { @@ -2255,7 +2259,10 @@ public void setPrimaryKey( long tableId, Long keyId ) { keyId, old.dataPlacements, old.modifiable, - old.partitionProperty, old.connectedViews ); + old.partitionProperty, + old.connectedViews, + old.cached, + old.hidden ); } synchronized ( this ) { @@ -2503,7 +2510,7 @@ public long addCollectionLogistics( long schemaId, String name, List throw new RuntimeException( e ); } } else { - tableId = addTable( 
name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true ); + tableId = addTable( name, schemaId, Catalog.defaultUserId, EntityType.ENTITY, true, false, false ); } stores.forEach( store -> addDataPlacement( store.getAdapterId(), tableId ) ); @@ -3274,7 +3281,7 @@ public void deleteColumn( long columnId ) { old.dataPlacements, old.modifiable, old.partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); } synchronized ( this ) { columnNames.remove( new Object[]{ column.databaseId, column.schemaId, column.tableId, column.name } ); @@ -4293,7 +4300,7 @@ public void partitionTable( long tableId, PartitionType partitionType, long part old.dataPlacements, old.modifiable, partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); synchronized ( this ) { tables.replace( tableId, table ); @@ -4349,7 +4356,7 @@ public void mergeTable( long tableId ) { old.dataPlacements, old.modifiable, partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); synchronized ( this ) { tables.replace( tableId, table ); @@ -4378,7 +4385,7 @@ public void updateTablePartitionProperties( long tableId, PartitionProperty part old.dataPlacements, old.modifiable, partitionProperty, - old.connectedViews ); + old.connectedViews, false, false ); synchronized ( this ) { tables.replace( tableId, table ); @@ -4852,7 +4859,9 @@ public void updateDataPlacementsOnTable( long tableId, List newDataPlac ImmutableList.copyOf( newDataPlacements ), old.modifiable, old.partitionProperty, - old.connectedViews ); + old.connectedViews, + old.cached, + old.hidden ); } synchronized ( this ) { diff --git a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java index 6cdd9bea5c..a0e64426de 100644 --- a/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java +++ b/plugins/mapdb-catalog/src/test/java/org/polypheny/db/test/CatalogTest.java @@ -97,7 +97,7 @@ public void testLayout() throws UnknownDatabaseException, UnknownSchemaException CatalogSchema schema = catalog.getSchema( databaseId, "test_schema" ); assertEquals( schemaId, schema.id ); - long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true ); + long tableId = catalog.addTable( "test_table", schemaId, userId, EntityType.ENTITY, true, false, false ); CatalogTable table = catalog.getTable( schemaId, "test_table" ); assertEquals( tableId, table.id ); @@ -176,7 +176,7 @@ public void testTable() throws GenericCatalogException { List ids = new ArrayList<>(); for ( String name : names ) { - ids.add( catalog.addTable( name, schemaId, userId, EntityType.ENTITY, true ) ); + ids.add( catalog.addTable( name, schemaId, userId, EntityType.ENTITY, true, false, false ) ); } // test renaming table @@ -234,7 +234,7 @@ public void testColumn() throws GenericCatalogException { long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); - long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); + long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false ); List columnNames = new ArrayList<>( Arrays.asList( "column1", "column2", "column3", "column4", "column5" ) ); List columnIds = new ArrayList<>(); @@ -314,7 +314,7 @@ public void testColumnPlacement() throws UnknownAdapterException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); long schemaId = 
catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); - long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); + long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false ); long columnId = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); CatalogColumn column = catalog.getColumn( columnId ); @@ -345,7 +345,7 @@ public void testKey() throws GenericCatalogException { long databaseId = catalog.addDatabase( "APP", userId, user.name, 0, "" ); long schemaId = catalog.addNamespace( "schema1", databaseId, userId, NamespaceType.RELATIONAL ); - long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true ); + long tableId = catalog.addTable( "table1", schemaId, userId, EntityType.ENTITY, true, false, false ); long columnId1 = catalog.addColumn( "column1", tableId, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); CatalogColumn column1 = catalog.getColumn( columnId1 ); @@ -403,7 +403,7 @@ public void testKey() throws GenericCatalogException { } // test foreign key - long tableId2 = catalog.addTable( "table2", schemaId, userId, EntityType.ENTITY, true ); + long tableId2 = catalog.addTable( "table2", schemaId, userId, EntityType.ENTITY, true, false, false ); long columnId3 = catalog.addColumn( "column3", tableId2, 0, PolyType.BIGINT, null, null, null, null, null, false, null ); CatalogColumn column3 = catalog.getColumn( columnId3 ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java index 08efa10ed4..047fb85e95 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/SqlCreateTable.java @@ -248,7 +248,7 @@ public void execute( Context context, Statement statement, QueryParameters param ifNotExists, stores, placementType, - statement ); + false, statement, false ); if ( partitionType != null ) { DdlManager.getInstance().addPartitioning( diff --git a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java index 3a98c95de6..af0adc232b 100644 --- a/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java +++ b/plugins/sql-language/src/test/java/org/polypheny/db/sql/map/SchemaToJsonMapperTest.java @@ -63,7 +63,7 @@ public void exportTest() { ImmutableList.of(), true, PartitionProperty.builder().build(), - ImmutableList.of() ); + ImmutableList.of(), false, false ); Catalog catalog = Catalog.getInstance(); Arrays.asList( new CatalogColumn( 5, "sid", 4, 1, 1, 1, PolyType.INTEGER, null, null, null, null, null, false, null, null ), From f50d191e72934217893a99031960e0319d8433c6 Mon Sep 17 00:00:00 2001 From: datomo Date: Fri, 1 Sep 2023 20:31:01 +0200 Subject: [PATCH 20/22] used router for caching --- .../db/algebra/rules/CacheSwitcherRule.java | 54 ------------------- .../db/plan/volcano/VolcanoPlanner.java | 2 - .../db/routing/routers/BaseRouter.java | 35 ++++++++++++ .../adapter/ethereum/EthereumDataSource.java | 15 +++--- .../adapter/ethereum/EventCacheManager.java | 8 ++- 5 files changed, 46 insertions(+), 68 deletions(-) delete mode 100644 
core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java deleted file mode 100644 index b896b07b0a..0000000000 --- a/core/src/main/java/org/polypheny/db/algebra/rules/CacheSwitcherRule.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019-2023 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.algebra.rules; - -import java.util.List; -import org.polypheny.db.algebra.logical.relational.LogicalScan; -import org.polypheny.db.catalog.Catalog; -import org.polypheny.db.catalog.entity.CatalogTable; -import org.polypheny.db.plan.AlgOptRule; -import org.polypheny.db.plan.AlgOptRuleCall; -import org.polypheny.db.plan.AlgOptRuleOperand; -import org.polypheny.db.plan.AlgOptTable; - -public class CacheSwitcherRule extends AlgOptRule { - - public static CacheSwitcherRule INSTANCE = new CacheSwitcherRule( operand( LogicalScan.class, none() ) ); - - - public CacheSwitcherRule( AlgOptRuleOperand operand ) { - super( operand ); - } - - - @Override - public void onMatch( AlgOptRuleCall call ) { - LogicalScan scan = call.alg( 0 ); - Long id = scan.getTable().getTable().getTableId(); - if ( id == null ) { - return; - } - - CatalogTable table = Catalog.getInstance().getTable( id ); - if ( !table.cached ) { - return; - } - String adjustedName = Catalog.HIDDEN_PREFIX + table.name; - AlgOptTable cache = scan.getTable().getRelOptSchema().getTableForMember( List.of( table.getNamespaceName(), table.getNamespaceName(), adjustedName ) ); - } - -} diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java index a1921368ab..e6abfe2f19 100644 --- a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java +++ b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java @@ -71,7 +71,6 @@ import org.polypheny.db.algebra.rules.AggregateJoinTransposeRule; import org.polypheny.db.algebra.rules.AggregateProjectMergeRule; import org.polypheny.db.algebra.rules.AggregateRemoveRule; -import org.polypheny.db.algebra.rules.CacheSwitcherRule; import org.polypheny.db.algebra.rules.CalcRemoveRule; import org.polypheny.db.algebra.rules.DocumentToEnumerableRule; import org.polypheny.db.algebra.rules.FilterJoinRule; @@ -851,7 +850,6 @@ public void registerAbstractRelationalRules() { //addRule( ProjectRemoveRule.INSTANCE ); addRule( AggregateJoinTransposeRule.INSTANCE ); addRule( AggregateProjectMergeRule.INSTANCE ); - addRule( CacheSwitcherRule.INSTANCE ); addRule( CalcRemoveRule.INSTANCE ); addRule( SortRemoveRule.INSTANCE ); diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 6325554016..6e7fe4e40f 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ 
b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -61,12 +61,15 @@ import org.polypheny.db.catalog.entity.CatalogCollectionPlacement; import org.polypheny.db.catalog.entity.CatalogColumn; import org.polypheny.db.catalog.entity.CatalogColumnPlacement; +import org.polypheny.db.catalog.entity.CatalogDataPlacement; import org.polypheny.db.catalog.entity.CatalogGraphDatabase; import org.polypheny.db.catalog.entity.CatalogGraphMapping; import org.polypheny.db.catalog.entity.CatalogGraphPlacement; +import org.polypheny.db.catalog.entity.CatalogPartition; import org.polypheny.db.catalog.entity.CatalogPartitionPlacement; import org.polypheny.db.catalog.entity.CatalogSchema; import org.polypheny.db.catalog.entity.CatalogTable; +import org.polypheny.db.catalog.exceptions.UnknownTableException; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; @@ -185,6 +188,12 @@ public RoutedAlgBuilder handleScan( long partitionId, NamespaceType namespaceType ) { + CatalogTable table = Catalog.getInstance().getTable( tableId ); + + if ( table.cached ) { + return handleCached( builder, statement, storeUniqueName, physicalSchemaName, namespaceType, table ); + } + AlgNode node = builder.scan( ImmutableList.of( PolySchemaBuilder.buildAdapterSchemaName( storeUniqueName, logicalSchemaName, physicalSchemaName ), logicalTableName + "_" + partitionId ) ).build(); @@ -208,6 +217,32 @@ public RoutedAlgBuilder handleScan( } + private RoutedAlgBuilder handleCached( RoutedAlgBuilder builder, Statement statement, String storeUniqueName, String physicalSchemaName, NamespaceType namespaceType, CatalogTable table ) { + //todo add cache status later + CatalogTable cached; + try { + cached = Catalog.getInstance().getTable( table.namespaceId, Catalog.HIDDEN_PREFIX + table.name ); + } catch ( UnknownTableException e ) { + throw new RuntimeException( e ); + } + + CatalogDataPlacement placement = Catalog.getInstance().getDataPlacements( cached.id ).get( 0 ); + CatalogPartition partition = Catalog.getInstance().getPartitionsByTable( cached.id ).get( 0 ); + + return handleScan( + builder, + statement, + cached.id, + placement.getAdapterName(), + cached.getNamespaceName(), + cached.name, + physicalSchemaName, + PolySchemaBuilder.buildAdapterSchemaName( storeUniqueName, cached.getNamespaceName(), physicalSchemaName ), + partition.id, + namespaceType ); + } + + private AlgDataType getDocumentRowType() { // label table for cross model queries final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java index 96534bd153..7206c79f1d 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java @@ -188,20 +188,20 @@ public Map> getExportedColumns() { Map> map = new HashMap<>(); String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" }; - PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index 96534bd153..7206c79f1d 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -188,20 +188,20 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
         Map<String, List<ExportedColumn>> map = new HashMap<>();
 
         String[] blockColumns = { "number", "hash", "parent_hash", "nonce", "sha3uncles", "logs_bloom", "transactions_root", "state_root", "receipts_root", "author", "miner", "mix_hash", "difficulty", "total_difficulty", "extra_data", "size", "gas_limit", "gas_used", "timestamp" };
-        PolyType[] blockTypes = { PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.TIMESTAMP };
+        PolyType[] blockTypes = { PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.TIMESTAMP };
         createExportedColumns( "block", map, blockColumns, blockTypes );
 
         String[] transactionColumns = { "hash", "nonce", "block_hash", "block_number", "transaction_index", "from", "to", "value", "gas_price", "gas", "input", "creates", "public_key", "raw", "r", "s" };
-        PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
+        PolyType[] transactionTypes = { PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.VARCHAR };
         createExportedColumns( "transaction", map, transactionColumns, transactionTypes );
 
-        if ( eventDataRetrieval == false ) {
+        if ( !eventDataRetrieval ) {
             this.map = map;
             return map;
         }
 
         String[] commonEventColumns = { "removed", "log_index", "transaction_index", "transaction_hash", "block_hash", "block_number", "address" };
-        PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.BIGINT, PolyType.BIGINT, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.BIGINT, PolyType.VARCHAR };
+        PolyType[] commonEventTypes = { PolyType.BOOLEAN, PolyType.DECIMAL, PolyType.DECIMAL, PolyType.VARCHAR, PolyType.VARCHAR, PolyType.DECIMAL, PolyType.VARCHAR };
         createExportedColumnsForEvents( map, commonEventColumns, commonEventTypes );
 
         if ( caching == Boolean.TRUE ) {
@@ -336,7 +336,7 @@ private void createExportedColumnsForEvents( Map<String, List<ExportedColumn>> m
         for ( String address : smartContractAddresses ) {
             String contractName = null;
             List contractEvents = null;
-            if ( useManualABI == true && !contractABI.isEmpty() && !this.contractName.isEmpty() ) {
+            if ( useManualABI && !contractABI.isEmpty() && !this.contractName.isEmpty() ) {
                 if ( smartContractAddresses.size() > 1 ) {
                     throw new IllegalArgumentException( "Only one smart contract address should be provided when using a manual ABI." );
                }
@@ -549,6 +549,8 @@ private Integer getLengthForType( PolyType type ) {
                 return 300;
             case VARBINARY:
                 return 32;
+            case DECIMAL:
+                return 100;
             default:
                 return null;
         }
@@ -564,9 +566,8 @@ static PolyType convertToPolyType( String type ) {
             return PolyType.DECIMAL;
         } else if ( type.equals( "bytes" ) || type.startsWith( "bytes" ) ) {
             return PolyType.VARCHAR; // for dynamic and fixed-size
-        } else {
-            return null;
         }
+        throw new RuntimeException( "Could not find a matching PolyType" );
     }
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 078ae499dc..f0bdc0bd29 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -87,12 +87,11 @@ public class EventCacheManager implements Runnable {
      *
      * @param manager is used to create new transactions, which are required to create new queries.
      */
-    public static synchronized EventCacheManager getAndSet( TransactionManager manager ) {
+    public static synchronized void getAndSet( TransactionManager manager ) {
         if ( INSTANCE != null ) {
             throw new RuntimeException( String.format( "The %s was already set.", EventCacheManager.class.getSimpleName() ) );
         }
         INSTANCE = new EventCacheManager( manager );
-        return INSTANCE;
     }
 
@@ -148,8 +147,7 @@ void createTables( int sourceAdapterId, Map<String, List<FieldInformation>> tabl
     private Transaction getTransaction() {
         try {
-            Transaction transaction = transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" );
-            return transaction;
+            return transactionManager.startTransaction( Catalog.defaultDatabaseId, Catalog.defaultUserId, false, "Ethereum Plugin" );
         } catch ( UnknownSchemaException | UnknownDatabaseException | GenericCatalogException | UnknownUserException e ) {
             throw new RuntimeException( e );
         }
@@ -172,7 +170,7 @@ void writeToStore( String tableName, List<List<Object>> logResults, int targetAd
         AlgDataType rowType = table.getTable().getRowType( transaction.getTypeFactory() );
         builder.push( LogicalValues.createOneRow( builder.getCluster() ) );
         builder.project( rowType.getFieldList().stream().map( f -> new RexDynamicParam( f.getType(), f.getIndex() ) ).collect( Collectors.toList() ), rowType.getFieldNames() );
-        builder.insert( (AlgOptTable) table );
+        builder.insert( table ); // todo DL: we should re-use this for all batches (ignore right now)
 
         AlgNode node = builder.build(); // Construct the algebraic node
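The DECIMAL switch above widens all integral Ethereum values, since a uint256 can exceed BIGINT, and gives them a generous default length of 100 digits. The visible tail of convertToPolyType() implies roughly the following mapping; the branches before the DECIMAL return are not part of the hunk, so the address/bool/string cases below are assumptions:

    // Sketch of the ABI-to-PolyType mapping implied by this hunk; only the
    // uint/int (DECIMAL), bytes (VARCHAR), and fallback branches are visible
    // above, the remaining branches are assumed.
    static PolyType convertToPolyType( String type ) {
        if ( type.equals( "address" ) || type.equals( "string" ) ) {
            return PolyType.VARCHAR; // assumption
        } else if ( type.equals( "bool" ) ) {
            return PolyType.BOOLEAN; // assumption
        } else if ( type.startsWith( "uint" ) || type.startsWith( "int" ) ) {
            return PolyType.DECIMAL; // uint256 needs up to 78 decimal digits
        } else if ( type.startsWith( "bytes" ) ) {
            return PolyType.VARCHAR; // for dynamic and fixed-size byte arrays
        }
        throw new RuntimeException( "Could not find a matching PolyType" );
    }
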
From 0954a1b12d48a7ab4028674b2f685e890d4ff182 Mon Sep 17 00:00:00 2001
From: datomo
Date: Sun, 3 Sep 2023 21:42:56 +0200
Subject: [PATCH 21/22] hide caching tables and fixed uncached impl

---
 dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java   | 2 +-
 .../org/polypheny/db/adapter/ethereum/EthereumDataSource.java | 2 +-
 .../org/polypheny/db/adapter/ethereum/EventCacheManager.java  | 2 +-
 webui/src/main/java/org/polypheny/db/webui/Crud.java          | 3 +++
 4 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 2a87dfb9b0..0f4c4245bd 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -2212,7 +2212,7 @@ public void createTable( long schemaId, String name, List<FieldInformation> fiel
                 EntityType.ENTITY,
                 true,
                 cached,
-                false );
+                hidden );
 
         // Initially create DataPlacement containers on every store the table should be placed.
         stores.forEach( store -> catalog.addDataPlacement( store.getAdapterId(), tableId ) );
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index 7206c79f1d..57995f06c6 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -111,7 +111,6 @@ public class EthereumDataSource extends DataSource {
 
     public EthereumDataSource( final int storeId, final String uniqueName, final Map<String, String> settings ) {
         super( storeId, uniqueName, settings, true );
-        this.canCache = true;
         setClientURL( settings.get( "ClientUrl" ) );
         this.blocks = Integer.parseInt( settings.get( "Blocks" ) );
         this.experimentalFiltering = Boolean.parseBoolean( settings.get( "ExperimentalFiltering" ) );
@@ -126,6 +125,7 @@ public EthereumDataSource( final int storeId, final String uniqueName, final Map
         this.batchSizeInBlocks = Integer.parseInt( settings.get( "batchSizeInBlocks" ) );
         this.eventDataMap = new HashMap<>();
         this.caching = Boolean.parseBoolean( settings.get( "Caching" ) );
+        this.canCache = this.caching;
         this.cachingAdapterTargetName = settings.get( "CachingAdapterTargetName" );
         this.useManualABI = Boolean.parseBoolean( settings.get( "UseManualABI" ) );
         this.contractABI = settings.get( "ContractABI" );
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index f0bdc0bd29..44939f0b89 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -131,7 +131,7 @@ void createTables( int sourceAdapterId, Map<String, List<FieldInformation>> tabl
 
         // For each table, a new table is created with their constraint (e.g., a primary key).
         for ( Entry<String, List<FieldInformation>> table : tableInformations.entrySet() ) {
             ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( "log_index", "transaction_index", "block_number" ) );
-            DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, false, transaction.createStatement(), false );
+            DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, false, transaction.createStatement(), true );
         }
 
         try {
 
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index a2c77cc86e..cf0ac77149 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -395,6 +395,9 @@ void getSchemaTree( final Context ctx ) {
             ArrayList collectionTree = new ArrayList<>();
             List<CatalogTable> tables = catalog.getTables( schema.id, null );
             for ( CatalogTable table : tables ) {
+                if ( table.hidden ) {
+                    continue;
+                }
                 String icon = "fa fa-table";
                 if ( table.entityType == EntityType.SOURCE ) {
                     icon = "fa fa-plug";

From 73f62b4026676eab4f2da63e13d6be3d3d389683 Mon Sep 17 00:00:00 2001
From: Tunc Polat
Date: Mon, 4 Sep 2023 03:40:39 +0200
Subject: [PATCH 22/22] Clean up code by removing unnecessary elements and logs

---
 .../db/adapter/ethereum/CachingStatus.java    |  1 -
 .../db/adapter/ethereum/ContractCache.java    | 28 +++++------
 .../adapter/ethereum/EthereumDataSource.java  | 14 +++---
 .../db/adapter/ethereum/EthereumPlugin.java   |  7 ---
 .../db/adapter/ethereum/EthereumSchema.java   |  3 --
 .../db/adapter/ethereum/EthereumStarter.java  |  6 ---
 .../db/adapter/ethereum/EthereumTable.java    |  1 +
 .../db/adapter/ethereum/EventCache.java       | 18 ++------
 .../adapter/ethereum/EventCacheManager.java   | 46 ++++++-------
 .../db/adapter/ethereum/EventData.java        |  1 +
 .../db/adapter/ethereum/EventDataReader.java  | 16 +++----
 11 files changed, 48 insertions(+), 93 deletions(-)

diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java
index b22ac6f4fc..4c0ff95937 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/CachingStatus.java
@@ -21,7 +21,6 @@ public class CachingStatus {
 
     public float percent;
-    public ProcessingState state;
     public BigInteger fromBlock;
     public BigInteger toBlock;
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
index 9ac873289c..100a4383c4 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/ContractCache.java
@@ -29,6 +29,9 @@
 import org.web3j.protocol.Web3j;
 import org.web3j.protocol.http.HttpService;
 
+/**
+ * ContractCache serves as a contract-level cache that holds a set of smart contracts along with their corresponding events.
+ */
 @Slf4j
 public class ContractCache {
 
@@ -44,7 +47,6 @@ public class ContractCache {
 
     private final Map<String, EventCache> cache = new ConcurrentHashMap<>(); // a cache for each event
 
-    private final Map<String, List<EventData>> eventsPerContract;
     protected final Web3j web3j;
 
@@ -56,7 +58,6 @@ public ContractCache( int sourceAdapterId, int targetAdapterId, String clientUrl
         this.fromBlock = fromBlock;
         this.currentBlock = fromBlock;
         this.toBlock = toBlock;
-        this.eventsPerContract = eventsPerContract;
         this.web3j = Web3j.build( new HttpService( clientUrl ) );
         eventsPerContract.forEach( ( address, events ) -> this.cache.put( address, new EventCache( events, web3j ) ) );
     }
 
@@ -71,7 +72,6 @@ public void initializeCaching() {
 
     private void createSchema() {
-        log.warn( "start to create schema" );
         Map<String, List<FieldInformation>> columnInformations = columns.entrySet()
                 .stream()
                 .collect(
@@ -82,12 +82,12 @@ private void createSchema() {
                                 .map( ExportedColumn::toFieldInformation )
                                 .collect( Collectors.toList() ) ) );
 
-        EventCacheManager.getInstance().createTables( sourceAdapterId, columnInformations, targetAdapterId );
+        EventCacheManager.getInstance().createTables( columnInformations, targetAdapterId );
     }
 
 
     public void startCaching() {
-        log.warn( "start to cache" );
+        log.debug( "start to cache" );
         currentBlock = fromBlock;
 
         while ( currentBlock.compareTo( toBlock ) <= 0 ) {
@@ -96,20 +96,20 @@ public void startCaching() {
                 endBlock = toBlock;
             }
 
-            log.warn( "from-to: " + currentBlock + " to " + endBlock ); // in production: instead of .warn take .debug
+            log.debug( "from-to: " + currentBlock + " to " + endBlock );
 
             for ( Map.Entry<String, EventCache> entry : cache.entrySet() ) {
                 String address = entry.getKey();
                 EventCache eventCache = entry.getValue();
                 try {
-                    eventCache.addToCache(address, currentBlock, endBlock, targetAdapterId);
-                } catch (CacheException e) {
-                    log.error("Error occurred while adding to cache: " + e.getMessage());
+                    eventCache.addToCache( address, currentBlock, endBlock );
+                } catch ( CacheException e ) {
+                    log.error( "Error occurred while adding to cache: " + e.getMessage() );
                     hasError = true;
                     errorMessage = e.getMessage();
                     throw e;
-                } catch (Throwable t) {
-                    log.error("Unexpected error during caching: " + t.getMessage(), t);
+                } catch ( Throwable t ) {
+                    log.error( "Unexpected error during caching: " + t.getMessage(), t );
                     hasError = true;
                     errorMessage = t.getMessage();
                     return;
@@ -127,7 +127,7 @@ public CachingStatus getStatus() {
         status.fromBlock = fromBlock;
         status.toBlock = toBlock;
         status.currentBlock = currentBlock;
-        status.currentEndBlock = currentBlock.add(BigInteger.valueOf(batchSizeInBlocks));
+        status.currentEndBlock = currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) );
         status.sourceAdapterId = sourceAdapterId;
 
         if ( currentBlock.add( BigInteger.valueOf( batchSizeInBlocks ) ).compareTo( toBlock ) > 0 ) {
@@ -137,7 +137,7 @@ public CachingStatus getStatus() {
             status.currentEndBlock = null;
         } else {
             BigInteger processedBlocks = currentBlock.subtract( fromBlock );
-            status.percent = Math.round((processedBlocks.floatValue() / totalBlocks.floatValue() * 100) * 100) / 100f;
+            status.percent = Math.round( (processedBlocks.floatValue() / totalBlocks.floatValue() * 100) * 100 ) / 100f;
 
             if ( status.percent == 0 ) {
                 status.state = CachingStatus.ProcessingState.INITIALIZED;
             }
         }
 
-        if (hasError) {
+        if ( hasError ) {
             status.state = ProcessingState.ERROR;
             status.errorMessage = errorMessage;
         }
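startCaching() advances a BigInteger window over the configured block range; the advancement step itself sits outside the visible hunks. A standalone sketch of the batching technique, under the assumption that windows are contiguous and each covers batchSizeInBlocks blocks:

    import java.math.BigInteger;

    public class BlockWindowSketch {

        // Walk [fromBlock, toBlock] in contiguous, inclusive windows, mirroring
        // the loop shape of ContractCache.startCaching(); the exact advancement
        // in the adapter is not shown in the hunks and may differ.
        static void forEachWindow( BigInteger fromBlock, BigInteger toBlock, int batchSizeInBlocks ) {
            BigInteger current = fromBlock;
            while ( current.compareTo( toBlock ) <= 0 ) {
                BigInteger end = current.add( BigInteger.valueOf( batchSizeInBlocks ) ).min( toBlock );
                System.out.println( "caching logs from " + current + " to " + end );
                current = end.add( BigInteger.ONE ); // next window starts after the last cached block
            }
        }

        public static void main( String[] args ) {
            // Values taken from the adapter's default fromBlock/toBlock/batch settings.
            forEachWindow( BigInteger.valueOf( 17669045 ), BigInteger.valueOf( 17669155 ), 50 );
        }
    }
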
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
index 57995f06c6..bd96412e9b 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumDataSource.java
@@ -69,9 +69,9 @@
 @AdapterSettingBoolean(name = "ExperimentalFiltering", description = "Experimentally filter Past Block", defaultValue = false, position = 3, modifiable = true)
 @AdapterSettingBoolean(name = "EventDataRetrieval", description = "Enables or disables the retrieval of event data. When set to true, all subsequent adapter settings will be taken into account.", defaultValue = true, position = 4, modifiable = true)
 @AdapterSettingString(name = "SmartContractAddresses", description = "Comma separated addresses of the smart contracts", defaultValue = "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984, 0x6b175474e89094c44da98b954eedeac495271d0f", position = 5, modifiable = true) // Event Data: Add annotation
-@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 6, modifiable = true) // Event Data: Add annotation
-@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contract)", defaultValue = "17669045", position = 7, modifiable = true)
-@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contract)", defaultValue = "17669155", position = 8, modifiable = true)
+@AdapterSettingString(name = "EtherscanApiKey", description = "Etherscan API Token", defaultValue = "PJBVZ3BE1AI5AKIMXGK1HNC59PCDH7CQSP", position = 6, modifiable = true)
+@AdapterSettingString(name = "fromBlock", description = "Fetch block from (Smart Contracts)", defaultValue = "17669045", position = 7, modifiable = true)
+@AdapterSettingString(name = "toBlock", description = "Fetch block to (Smart Contracts)", defaultValue = "17669155", position = 8, modifiable = true)
 @AdapterSettingBoolean(name = "Caching", description = "Cache event data", defaultValue = true, position = 9, modifiable = true)
 @AdapterSettingInteger(name = "batchSizeInBlocks", description = "Batch size for caching in blocks", defaultValue = 50, position = 10, modifiable = true)
 @AdapterSettingString(name = "CachingAdapterTargetName", description = "Adapter Target Name", defaultValue = "hsqldb", position = 11, modifiable = true)
@@ -179,7 +179,6 @@ public void truncate( Context context, CatalogTable table ) {
 
     @Override
     public Map<String, List<ExportedColumn>> getExportedColumns() {
-        log.warn( "getExportedColumn" );
         // Ensure that this block of code is called only once by checking if 'map' is null before proceeding
         if ( map != null ) {
             return map;
@@ -230,7 +229,6 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
                             .register( getAdapterId(), cachingAdapter.id, clientURL, batchSizeInBlocks, fromBlock, toBlock, eventsPerContract, columns )
                             .initializeCaching();
                 } catch ( UnknownAdapterException e ) {
-                    // If the specified adapter is not found, throw a RuntimeException
                     throw new RuntimeException( e );
                 }
             } ).start();
@@ -512,10 +510,10 @@ private String getContractName( String contractAddress ) {
                 throw new RuntimeException( "Etherscan API error getting contract name: " + errorMessage );
             }
 
-            JSONArray resultArray = jsonObject.getJSONArray( "result" ); // Get result array
+            JSONArray resultArray = jsonObject.getJSONArray( "result" );
             if ( resultArray.length() > 0 ) {
-                JSONObject contractObject = resultArray.getJSONObject( 0 ); // Get the first object in result array
-                return contractObject.getString( "ContractName" ); // Return ContractName field
+                JSONObject contractObject = resultArray.getJSONObject( 0 );
+                return contractObject.getString( "ContractName" );
             }
         }
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
index 8a836d8888..2d9cdb8a85 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumPlugin.java
@@ -47,15 +47,8 @@ public void start() {
                 "Blocks", "10",
                 "ExperimentalFiltering", "false" );
 
-        TransactionExtension.REGISTER.add( new EthereumStarter() ); // add extension to transaction manager
-
         Adapter.addAdapter( EthereumDataSource.class, ADAPTER_NAME, settings );
-
     }
-
-
-
-
 }
\ No newline at end of file
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
index df36745f04..e7c65857fe 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumSchema.java
@@ -21,7 +21,6 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
 import org.polypheny.db.algebra.type.AlgDataTypeImpl;
@@ -36,7 +35,6 @@
 import org.polypheny.db.type.PolyTypeFactoryImpl;
 import org.polypheny.db.util.Util;
 
-@Slf4j
 public class EthereumSchema extends AbstractSchema {
 
     private final String clientUrl;
@@ -63,7 +61,6 @@ public Table createBlockchainTable( CatalogTable catalogTable, List i
         ).toArray();
 
         EthereumMapper mapper = catalogTable.name.startsWith( "block" ) ? EthereumMapper.BLOCK : catalogTable.name.startsWith( "transaction" ) ? EthereumMapper.TRANSACTION : EthereumMapper.EVENTDATA;
-        // each table will get one EthereumTable; send event metadata down here.
 
         EthereumTable.Builder tableBuilder = new EthereumTable.Builder(
                 clientUrl,
                 AlgDataTypeImpl.proto( fieldInfo.build() ),
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java
index 1a53b46b88..6ec7651743 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumStarter.java
@@ -16,18 +16,12 @@
 
 package org.polypheny.db.adapter.ethereum;
 
-import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.iface.Authenticator;
 import org.polypheny.db.processing.TransactionExtension;
 import org.polypheny.db.transaction.TransactionManager;
 import org.polypheny.db.webui.HttpServer;
 import org.polypheny.db.webui.HttpServer.HandlerType;
 
-// helper method, because Polypheny will create the TransactionManager (TM) relatively late
-// Polypheny will startup and then get all the plugins
-// But at this point there is no access to the TM
-// We just say here, hey this is a TransactionExtension that says: Hey this an extension that the TM needs, please call this too as soon as we have the TM
-@Slf4j
 public class EthereumStarter implements TransactionExtension {
 
     @Override
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
index 8186fdc038..434fdc1efd 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EthereumTable.java
@@ -64,6 +64,7 @@ public EthereumTable( Builder builder ) {
     }
 
+    // Nested Builder pattern: provides the flexibility to toggle event-data fetching on and off.
     public static class Builder {
 
         protected final String clientUrl;
 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
index d77ccd7090..24b30a4a03 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCache.java
@@ -23,12 +23,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
-import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.NotImplementedException;
-import org.polypheny.db.adapter.DataSource.ExportedColumn;
-import org.polypheny.db.ddl.DdlManager.FieldInformation;
-import org.polypheny.db.type.PolyType;
 import org.web3j.abi.EventEncoder;
 import org.web3j.abi.FunctionReturnDecoder;
 import org.web3j.abi.TypeReference;
@@ -39,11 +33,8 @@
 import org.web3j.protocol.core.Response;
 import org.web3j.protocol.core.methods.request.EthFilter;
 import org.web3j.protocol.core.methods.response.EthLog;
-import org.web3j.protocol.core.methods.response.EthLog.LogResult;
 import org.web3j.protocol.core.methods.response.Log;
-import org.web3j.protocol.http.HttpService;
 
-@Slf4j // library to use logging annotations
 public class EventCache {
 
     private final Map<EventData, List<List<Object>>> cache = new ConcurrentHashMap<>(); // a cache for each event
 
@@ -58,13 +49,13 @@ public EventCache( List<EventData> events, Web3j web3j ) {
     }
 
-    public void addToCache( String address, BigInteger startBlock, BigInteger endBlock, int targetAdapterId ) {
+    public void addToCache( String address, BigInteger startBlock, BigInteger endBlock ) {
         for ( EventData event : events ) {
             addLogsToCache( address, event, startBlock, endBlock );
             if ( cache.get( event ).size() == 0 ) {
                 continue;
             }
-            EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ), targetAdapterId ); // write the event into the store
+            EventCacheManager.getInstance().writeToStore( event.getCompositeName(), cache.get( event ) ); // write event data into the store
             cache.get( event ).clear(); // clear cache batch
         }
     }
 
@@ -85,7 +76,6 @@ private void addLogsToCache( String address, EventData eventData, BigInteger sta
 
         if ( ethLog.hasError() ) {
             Response.Error error = ethLog.getError();
-            log.error( "Error fetching logs: " + error.getMessage() );
             throw new CacheException( "Error occurred while fetching logs for block range: " + startBlock + " to " + endBlock + ". Please retry starting from block " + startBlock + " and continue to your intended final block. Error Message: " + error.getMessage() );
Error Message: " + error.getMessage() ); } List rawLogs = ethLog.getLogs(); @@ -106,7 +96,7 @@ private List> normalizeLogs( Event event, List ra if ( rawLog.getLogIndex() == null || rawLog.getTransactionIndex() == null || rawLog.getBlockNumber() == null ) { - continue; // don't add pending logs because of primary key + continue; // don't add pending logs because of primary key constraint } List structuredLog = new ArrayList<>(); @@ -129,7 +119,7 @@ private List> normalizeLogs( Event event, List ra } } - // Add other log information as needed + // Add other log information structuredLog.add( rawLog.isRemoved() ); structuredLog.add( rawLog.getLogIndex() ); structuredLog.add( rawLog.getTransactionIndex() ); diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java index 44939f0b89..aadaf26e47 100644 --- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java +++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java @@ -25,8 +25,6 @@ import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; -import javax.annotation.Nullable; -import lombok.extern.slf4j.Slf4j; import org.polypheny.db.PolyImplementation; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataSource.ExportedColumn; @@ -68,16 +66,13 @@ import org.web3j.abi.datatypes.Uint; -@Slf4j public class EventCacheManager implements Runnable { - // Singleton instance of EventCacheManager (T) private static EventCacheManager INSTANCE = null; private final TransactionManager transactionManager; - // concurrent map, which maintains multiple caches, which correspond to the adapter which requested the caches - // to allow multiple threads to read and modify; keys: adapterId, value: EventCache (T) + public Map caches = new ConcurrentHashMap<>(); @@ -115,20 +110,13 @@ public ContractCache register( int sourceAdapterId, int targetAdapterId, String } - @Nullable - public ContractCache getCache( int adapterId ) { - return caches.get( adapterId ); - } - - - void createTables( int sourceAdapterId, Map> tableInformations, int targetAdapterId ) { - log.warn( "start to create tables" ); + void createTables( Map> tableInformations, int targetAdapterId ) { try { long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; // get the default schema Transaction transaction = getTransaction(); // get the transaction DataStore store = AdapterManager.getInstance().getStore( targetAdapterId ); // get the target store from the adapater - // For each table, a new table is created with their constraint (e.g., a primary key). + // For each table, a new table is created with their constraints (e.g., primary key). 
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
index 44939f0b89..aadaf26e47 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventCacheManager.java
@@ -25,8 +25,6 @@
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
-import javax.annotation.Nullable;
-import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.PolyImplementation;
 import org.polypheny.db.adapter.AdapterManager;
 import org.polypheny.db.adapter.DataSource.ExportedColumn;
@@ -68,16 +66,13 @@
 import org.web3j.abi.datatypes.Uint;
 
-@Slf4j
 public class EventCacheManager implements Runnable {
 
-    // Singleton instance of EventCacheManager (T)
    private static EventCacheManager INSTANCE = null;

    private final TransactionManager transactionManager;

-    // concurrent map, which maintains multiple caches, which correspond to the adapter which requested the caches
-    // to allow multiple threads to read and modify; keys: adapterId, value: EventCache (T)
+
    public Map<Integer, ContractCache> caches = new ConcurrentHashMap<>();

@@ -115,20 +110,13 @@ public ContractCache register( int sourceAdapterId, int targetAdapterId, String
     }
 
-    @Nullable
-    public ContractCache getCache( int adapterId ) {
-        return caches.get( adapterId );
-    }
-
-
-    void createTables( int sourceAdapterId, Map<String, List<FieldInformation>> tableInformations, int targetAdapterId ) {
-        log.warn( "start to create tables" );
+    void createTables( Map<String, List<FieldInformation>> tableInformations, int targetAdapterId ) {
         try {
             long namespaceId = Catalog.getInstance().getSchema( Catalog.defaultDatabaseId, "public" ).id; // get the default schema
             Transaction transaction = getTransaction(); // get the transaction
             DataStore store = AdapterManager.getInstance().getStore( targetAdapterId ); // get the target store from the adapter
 
-            // For each table, a new table is created with their constraint (e.g., a primary key).
+            // For each table, a new table is created with its constraints (e.g., primary key).
         for ( Entry<String, List<FieldInformation>> table : tableInformations.entrySet() ) {
             ConstraintInformation primaryConstraint = new ConstraintInformation( table.getKey() + "primary", ConstraintType.PRIMARY, List.of( "log_index", "transaction_index", "block_number" ) );
             DdlManager.getInstance().createTable( namespaceId, table.getKey(), table.getValue(), List.of( primaryConstraint ), false, List.of( store ), PlacementType.AUTOMATIC, false, transaction.createStatement(), true );
@@ -154,7 +142,7 @@ private Transaction getTransaction() {
     }
 
-    void writeToStore( String tableName, List<List<Object>> logResults, int targetAdapterId ) {
+    void writeToStore( String tableName, List<List<Object>> logResults ) {
         if ( logResults.isEmpty() ) {
             return;
         }
@@ -163,7 +151,6 @@ void writeToStore( String tableName, List<List<Object>> logResults, int targetAd
 
         AlgBuilder builder = AlgBuilder.create( statement );
 
-        // TableEntry table = transaction.getSchema().getTable( EthereumPlugin.HIDDEN_PREFIX + tableName );
         AlgOptSchema algOptSchema = transaction.getCatalogReader();
         AlgOptTable table = algOptSchema.getTableForMember( Collections.singletonList( Catalog.HIDDEN_PREFIX + tableName ) );
 
@@ -186,19 +173,16 @@ void writeToStore( String tableName, List<List<Object>> logResults, int targetAd
             List<Object> fieldValues = new ArrayList<>();
             for ( List<Object> logResult : logResults ) {
                 Object value = logResult.get( i );
-                value = convertValueBasedOnType(value);
+                value = convertValueBasedOnType( value );
                 fieldValues.add( value );
             }
             i++;
             statement.getDataContext().addParameterValues( idx, type, fieldValues ); // take the correct indexedParameters - at the moment we only add one row at a time, could refactor to add the whole batch
         }
 
-        log.warn( "write to store before; table name: " + tableName );
         // execute the transaction (query will be executed)
         PolyImplementation implementation = statement.getQueryProcessor().prepareQuery( root, false ); // implements the code basically
-        log.warn( "write to store after; table name: " + tableName );
         implementation.getRows( statement, -1 ); // Executes the query, with -1 meaning to fill in the whole batch
-        log.warn( "finish write to store for table: " + tableName );
 
         try {
             transaction.commit();
         } catch ( TransactionException e ) {
@@ -212,21 +196,22 @@ protected Map<Integer, CachingStatus> getAllStreamStatus() {
         return caches.values().stream().collect( Collectors.toMap( c -> c.sourceAdapterId, ContractCache::getStatus ) );
     }
 
-    private Object convertValueBasedOnType(Object value) {
-        if (value instanceof Address) {
+
+    private Object convertValueBasedOnType( Object value ) {
+        if ( value instanceof Address ) {
             return value.toString();
-        } else if (value instanceof Bool) {
+        } else if ( value instanceof Bool ) {
             return ((Bool) value).getValue();
-        } else if (value instanceof DynamicBytes) {
+        } else if ( value instanceof DynamicBytes ) {
             return ((DynamicBytes) value).getValue().toString();
-        } else if (value instanceof Bytes) {
+        } else if ( value instanceof Bytes ) { // Similarly for Bytes and its subclasses (e.g. Bytes1...Bytes32)
             return value.toString();
-        } else if (value instanceof Uint) { // Similarly for Uint and its subclasses
+        } else if ( value instanceof Uint ) { // Similarly for Uint and its subclasses (e.g. Uint256)
             BigInteger bigIntValue = ((Uint) value).getValue();
-            return bigIntValue == null ? null : new BigDecimal(bigIntValue);
-        } else if (value instanceof Int) { // Similarly for Int and its subclasses
+            return bigIntValue == null ? null : new BigDecimal( bigIntValue );
+        } else if ( value instanceof Int ) { // Similarly for Int and its subclasses
             BigInteger bigIntValue = ((Int) value).getValue();
-            return bigIntValue == null ? null : new BigDecimal(bigIntValue);
+            return bigIntValue == null ? null : new BigDecimal( bigIntValue );
         }
         return value; // return the original value if none of the conditions match
     }
 
@@ -234,7 +219,6 @@ private Object convertValueBasedOnType(Object value) {
 
     @Override
     public void run() {
-
     }
 
 }
\ No newline at end of file
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
index c0af0864ed..171614d905 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventData.java
@@ -182,6 +182,7 @@ private static List<TypeReference<?>> createTypeReferences( JSONArray abiInputs
                 } );
                 break;
             default:
+                // cover all cases; doing this dynamically would need an external library
                 if ( type.startsWith( "uint" ) ) {
                     int bitSize = Integer.parseInt( type.substring( 4 ) ); // Get the bit size, e.g., 8 from uint8
                     typeReferences.add( createUintTypeReference( bitSize, indexed ) );
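createUintTypeReference() itself is not part of the hunk; in web3j, a type reference for a sized integer is typically built by anonymous subclassing so the generic parameter survives erasure, with a boolean flag marking the parameter as indexed. A sketch under that assumption, covering two bit sizes only:

    import org.web3j.abi.TypeReference;
    import org.web3j.abi.datatypes.generated.Uint256;
    import org.web3j.abi.datatypes.generated.Uint8;

    public class UintTypeRefSketch {

        // The anonymous subclass {} preserves the generic type at runtime;
        // indexed = true means the value arrives in a topic, not in the data field.
        static TypeReference<?> createUintTypeReference( int bitSize, boolean indexed ) {
            switch ( bitSize ) {
                case 8:
                    return new TypeReference<Uint8>( indexed ) {};
                case 256:
                    return new TypeReference<Uint256>( indexed ) {};
                default:
                    throw new IllegalArgumentException( "Unhandled bit size: " + bitSize ); // the real helper presumably covers all sizes
            }
        }
    }
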
diff --git a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
index 3988b0ca39..2b1ac0b861 100644
--- a/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
+++ b/plugins/ethereum-adapter/src/main/java/org/polypheny/db/adapter/ethereum/EventDataReader.java
@@ -22,7 +22,6 @@
 import java.util.List;
 import java.util.Arrays;
 import java.util.function.Predicate;
-import lombok.extern.slf4j.Slf4j;
 import org.web3j.abi.FunctionReturnDecoder;
 import org.web3j.abi.datatypes.Type;
 import org.web3j.protocol.core.Response;
@@ -34,7 +33,7 @@
 import org.web3j.abi.TypeReference;
 import org.web3j.abi.EventEncoder;
 
-@Slf4j
+
 public class EventDataReader extends BlockReader {
 
     private List<EthLog.LogResult> logs;
@@ -55,11 +54,10 @@ public class EventDataReader extends BlockReader {
         filter.addSingleTopic( EventEncoder.encode( event ) );
 
         try {
-            EthLog ethLog = web3j.ethGetLogs( filter ).send(); // Get the EthLog response
+            EthLog ethLog = web3j.ethGetLogs( filter ).send();
 
             if ( ethLog.hasError() ) {
                 Response.Error error = ethLog.getError();
-                log.error( "Error fetching logs: " + error.getMessage() );
                 throw new RuntimeException( "Error fetching logs: " + error.getMessage() );
             }
 
@@ -74,22 +72,22 @@ public class EventDataReader extends BlockReader {
     @Override
     public String[] readNext() throws IOException {
         if ( this.blockReads <= 0 || currentLogIndex >= logs.size() ) {
-            return null; // no more blocks to read or no more logs to process
+            return null;
         }
 
         EthLog.LogResult logResult = logs.get( currentLogIndex );
         Log log = (Log) logResult.get();
-        currentLogIndex++; // Move to the next log for the next call to readNext()
+        currentLogIndex++;
 
         if ( currentLogIndex >= logs.size() ) {
-            this.blockReads--; // Decrement blockReads when all logs for the current block have been processed
+            this.blockReads--;
         }
 
-        // Decode the data field of the log(non-indexed parameters)
+        // Decode the data field of the log (non-indexed parameters)
         String data = log.getData();
         List<Type> decodedData = FunctionReturnDecoder.decode( data, event.getNonIndexedParameters() );
 
-        // Decode the topics of the log
+        // Decode the topics of the log (indexed parameters)
         List<String> topics = log.getTopics();
         topics.remove( 0 ); // The first topic is the event signature, so we skip it
         List<Type> decodedTopics = new ArrayList<>();