Merge pull request #118 from SongY123/main
support spatial query
cirnoooo123 authored Feb 27, 2023
2 parents 71d4377 + 6bcc126 commit 10f41e8
Showing 15 changed files with 244 additions and 270 deletions.
@@ -4,22 +4,15 @@
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.hufudb.openhufu.benchmark.enums.SpatialTableName;
import com.hufudb.openhufu.benchmark.enums.TPCHTableName;
import com.hufudb.openhufu.core.table.GlobalTableConfig;
import com.hufudb.openhufu.data.schema.Schema;
import com.hufudb.openhufu.data.storage.DataSet;
import com.hufudb.openhufu.data.storage.DataSetIterator;
import com.hufudb.openhufu.expression.AggFuncType;
import com.hufudb.openhufu.expression.ExpressionFactory;
import com.hufudb.openhufu.owner.user.OpenHuFuUser;
import com.hufudb.openhufu.plan.BinaryPlan;
import com.hufudb.openhufu.plan.LeafPlan;
import com.hufudb.openhufu.proto.OpenHuFuData.ColumnType;
import com.hufudb.openhufu.proto.OpenHuFuData.Modifier;
import com.hufudb.openhufu.proto.OpenHuFuPlan;
import com.hufudb.openhufu.proto.OpenHuFuPlan.Collation;
import com.hufudb.openhufu.proto.OpenHuFuPlan.JoinCondition;
import com.hufudb.openhufu.proto.OpenHuFuPlan.JoinType;
import com.hufudb.openhufu.proto.OpenHuFuPlan.Expression;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
@@ -42,13 +35,13 @@ public static void setUp() throws IOException {

List<String> endpoints =
new Gson().fromJson(Files.newBufferedReader(
Path.of(OpenHuFuBenchmark.class.getClassLoader().getResource("spatialEndpoints.json")
Path.of(OpenHuFuBenchmark.class.getClassLoader().getResource("spatial-endpoints.json")
.getPath())),
new TypeToken<ArrayList<String>>() {
}.getType());
List<GlobalTableConfig> globalTableConfigs =
new Gson().fromJson(Files.newBufferedReader(
Path.of(OpenHuFuBenchmark.class.getClassLoader().getResource("spatialTables.json")
Path.of(OpenHuFuBenchmark.class.getClassLoader().getResource("spatial-tables.json")
.getPath())),
new TypeToken<ArrayList<GlobalTableConfig>>() {
}.getType());
@@ -84,4 +77,31 @@ public void testSelect() {
dataset.close();
}

@Test
public void testSpatialDistance() {
String tableName = SpatialTableName.SPATIAL.getName();
LeafPlan plan = new LeafPlan();
plan.setTableName(tableName);
plan.setSelectExps(ExpressionFactory.createInputRef(user.getOpenHuFuTableSchema(tableName).getSchema()));
// select * from spatial where DWithin(S_POINT, Point(1404050.076199729, -4762163.267865509), 0.1);
Expression pointFunc =
ExpressionFactory.createScalarFunc(ColumnType.POINT, "Point",
ImmutableList.of(ExpressionFactory.createLiteral(ColumnType.DOUBLE, 1404050.076199729),
ExpressionFactory.createLiteral(ColumnType.DOUBLE, -4762163.267865509)));
Expression dwithinFunc =
ExpressionFactory.createScalarFunc(ColumnType.BOOLEAN, "DWithin",
ImmutableList.of(ExpressionFactory.createInputRef(1, ColumnType.POINT, Modifier.PUBLIC),
pointFunc, ExpressionFactory.createLiteral(ColumnType.DOUBLE, 0.1)));
plan.setWhereExps(ImmutableList.of(dwithinFunc));
DataSet dataset = user.executeQuery(plan);
DataSetIterator it = dataset.getIterator();
int count = 0;
assertEquals(2, it.size());
while (it.next()) {
assertEquals(0L, it.get(0));
count++;
}
assertEquals(1, count);
}

}
233 changes: 0 additions & 233 deletions plan/src/main/java/com/hufudb/openhufu/expression/BasicTranslator.java

This file was deleted.

12 changes: 0 additions & 12 deletions plan/src/main/java/com/hufudb/openhufu/expression/Translator.java

This file was deleted.

1 change: 0 additions & 1 deletion plan/src/main/java/com/hufudb/openhufu/udf/ScalarUDF.java
@@ -11,5 +11,4 @@ public interface ScalarUDF {
String getName();
ColumnType getOutType(List<ColumnType> inTypes);
Object implement(List<Object> inputs);
String translate(String dataSource, List<String> inputs);
}
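With translate(String, List<String>) removed, a scalar UDF only has to report its name, derive an output type, and evaluate a row. A minimal sketch of an implementation against the trimmed interface, assuming purely for illustration that point arguments arrive as double[] {x, y} (the geometry representation used by the spatial UDF jar is not shown in this diff):

import com.hufudb.openhufu.proto.OpenHuFuData.ColumnType;
import com.hufudb.openhufu.udf.ScalarUDF;
import java.util.List;

// Hypothetical example, not part of this commit.
public class EuclideanDistanceUDF implements ScalarUDF {
  @Override
  public String getName() {
    return "EuclideanDistance"; // key used when the UDF is looked up by name
  }

  @Override
  public ColumnType getOutType(List<ColumnType> inTypes) {
    return ColumnType.DOUBLE;
  }

  @Override
  public Object implement(List<Object> inputs) {
    // assumed input layout: two points encoded as double[] {x, y}
    double[] a = (double[]) inputs.get(0);
    double[] b = (double[]) inputs.get(1);
    double dx = a[0] - b[0];
    double dy = a[1] - b[1];
    return Math.sqrt(dx * dx + dy * dy);
  }
}
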
15 changes: 1 addition & 14 deletions plan/src/main/java/com/hufudb/openhufu/udf/UDFLoader.java
@@ -32,12 +32,7 @@ private UDFLoader() {}

public static Map<String, ScalarUDF> loadScalarUDF(String scalarUDFDirectory) {
LOG.info("Load scalar udf from {}", scalarUDFDirectory);
File udfs[] = new File(scalarUDFDirectory).listFiles(new FileFilter() {
@Override
public boolean accept(File file) {
return file.getName().endsWith(".jar");
}
});
File udfs[] = new File(scalarUDFDirectory).listFiles(file -> file.getName().endsWith(".jar"));
List<URL> udfURLs = new ArrayList<>(udfs.length);
for (File udf : udfs) {
try {
@@ -65,14 +60,6 @@ public static Object implementScalar(String funcName, List<Object> inputs) {
return UDFLoader.scalarUDFs.get(funcName).implement(inputs);
}

public static String translateScalar(String funcName, String dataSource, List<String> inputs) {
if (!UDFLoader.scalarUDFs.containsKey(funcName)) {
LOG.error("Unsupported scalar UDF {}", funcName);
throw new RuntimeException("Unsupported scalar UDF");
}
return UDFLoader.scalarUDFs.get(funcName).translate(dataSource, inputs);
}

public static ColumnType getScalarOutType(String funcName, List<Expression> inputs) {
return UDFLoader.scalarUDFs.get(funcName)
.getOutType(inputs.stream().map(in -> in.getOutType()).collect(Collectors.toList()));
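For context, loadScalarUDF is driven by a plain directory scan: every *.jar under the given path is loaded and the scalar UDF implementations it contains are returned as a name-to-implementation map. A hedged usage sketch, with the directory path assumed to match where the packaging script below places the spatial UDF jar:

import com.hufudb.openhufu.udf.ScalarUDF;
import com.hufudb.openhufu.udf.UDFLoader;
import java.util.Map;

// Hypothetical call site, not part of this commit.
public class LoadScalarUDFExample {
  public static void main(String[] args) {
    // scan ./release/udf/scalar for *.jar files and load the scalar UDFs they provide
    Map<String, ScalarUDF> udfs = UDFLoader.loadScalarUDF("./release/udf/scalar");
    udfs.forEach((name, udf) -> System.out.println("loaded scalar UDF: " + name));
  }
}
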
1 change: 1 addition & 0 deletions pom.xml
@@ -23,6 +23,7 @@
<module>platform</module>
<module>proto</module>
<module>rpc</module>
<module>udf</module>
</modules>

<properties>
5 changes: 5 additions & 0 deletions scripts/build/package.sh
@@ -8,14 +8,19 @@ if [ $# -eq 0 ]; then
mvn clean install -T ${thread} -Dmaven.test.skip=true
mkdir -p ./release/bin
mkdir -p ./release/adapter
mkdir -p ./release/udf/scalar
cp owner/target/*-with-dependencies.jar ./release/bin/owner_server.jar
cp adapter/adapter-csv/target/*-with-dependencies.jar ./release/adapter/adapter_csv.jar
cp udf/spatial-udf/target/*-with-dependencies.jar ./release/udf/scalar/spatial_udf.jar
elif [ $1 == "owner" ]; then
mvn install -T ${thread} -Dmaven.test.skip=true -pl $1
cp owner/target/*-with-dependencies.jar ./release/bin/onedb_owner_server.jar
elif [ $1 == "adapter" ]; then
mvn install -T ${thread} -Dmaven.test.skip=true -pl $1
cp adapter/adapter-csv/target/*-with-dependencies.jar ./release/adapter/adapter_csv.jar
elif [ $1 == "udf" ]; then
mvn install -T ${thread} -Dmaven.test.skip=true -pl $1
cp udf/spatial-udf/target/*-with-dependencies.jar ./release/udf/scalar/spatial_udf.jar
elif [ $1 == "benchmark" ]; then
mvn install -T ${thread} -Dmaven.test.skip=true -pl $1
else