refact: fix some code style with check plugin #385

Open · wants to merge 4 commits into base: master

@@ -24,13 +24,10 @@

import org.apache.hugegraph.api.graph.GraphAPI;
import org.apache.hugegraph.client.RestClient;

import org.apache.hugegraph.rest.RestResult;

import org.apache.hugegraph.structure.constant.Direction;
import org.apache.hugegraph.structure.traverser.SingleSourceJaccardSimilarityRequest;


import org.apache.hugegraph.util.E;

public class JaccardSimilarityAPI extends TraversersAPI {

@@ -19,7 +19,6 @@

package org.apache.hugegraph.exception;


import org.apache.hugegraph.rest.ClientException;

public class InvalidOperationException extends ClientException {

@@ -24,11 +24,15 @@
import org.apache.hugegraph.rest.RestResult;

import jakarta.ws.rs.core.Response;
import org.apache.hugegraph.util.Log;
import org.slf4j.Logger;

public class ServerException extends RuntimeException {

private static final long serialVersionUID = 6335623004322652358L;

protected static final Logger LOG = Log.logger(ServerException.class);
Reviewer comment (Contributor): mark as private?
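A minimal sketch of what the reviewer appears to be suggesting (an assumption about intent; the final visibility is the author's call). If no subclass needs the logger, it can be narrowed to private:

// Hypothetical alternative raised in review: keep the logger private to ServerException
private static final Logger LOG = Log.logger(ServerException.class);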


private static final String[] EXCEPTION_KEYS = {"exception",
"Exception-Class"};
private static final String[] MESSAGE_KEYS = {"message"};
@@ -54,6 +58,7 @@
exception.cause = (String) getByKeys(json, CAUSE_KEYS);
exception.trace = getByKeys(json, TRACE_KEYS);
} catch (Exception ignored) {
LOG.error("ServerException fromResponse excepiton");

Codecov (codecov/patch) warning: added line hugegraph-client/src/main/java/org/apache/hugegraph/exception/ServerException.java#L61 was not covered by tests.
}

return exception;
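Since reviewers question the new logging inside these previously silent catch blocks, here is one hedged alternative (an editorial sketch, not the PR's code): pass the caught exception so the log line carries a stack trace, and rename the variable so it no longer claims to be ignored.

} catch (Exception e) {
    // Hypothetical variant: keep the fallback behaviour but record the cause with its stack trace
    LOG.error("Failed to parse ServerException from response", e);
}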

@@ -42,8 +42,7 @@
private HugeClient client;
private GraphSchema graphSchema;


public HBaseSerializer(HugeClient client, int vertexPartitions, int edgePartitions){
public HBaseSerializer(HugeClient client, int vertexPartitions, int edgePartitions) {

Codecov (codecov/patch) warning: added line hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java#L45 was not covered by tests.
this.client = client;
this.graphSchema = new GraphSchema(client);
this.edgeLogicPartitions = edgePartitions;
@@ -52,12 +51,12 @@

public byte[] getKeyBytes(GraphElement e) {
byte[] array = null;
if(e.type() == "vertex" && e.id() != null){
if (e.type() == "vertex" && e.id() != null) {
BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + e.id().toString().length());
buffer.writeShort(getPartition(HugeType.VERTEX, IdGenerator.of(e.id())));
buffer.writeId(IdGenerator.of(e.id()));
array = buffer.bytes();
}else if ( e.type() == "edge" ){
} else if (e.type() == "edge") {
BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID);
Edge edge = (Edge)e;
buffer.writeShort(getPartition(HugeType.EDGE, IdGenerator.of(edge.sourceId())));
@@ -73,22 +72,22 @@

public byte[] getValueBytes(GraphElement e) {
byte[] array = null;
if(e.type() == "vertex"){
int propsCount = e.properties().size() ; //vertex.sizeOfProperties();
if (e.type() == "vertex") {
int propsCount = e.properties().size(); //vertex.sizeOfProperties();

Codecov (codecov/patch) warning: added line hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java#L76 was not covered by tests.
BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount);
buffer.writeId(IdGenerator.of(graphSchema.getVertexLabel(e.label()).id()));
buffer.writeVInt(propsCount);
for(Map.Entry<String, Object> entry : e.properties().entrySet()){
for (Map.Entry<String, Object> entry : e.properties().entrySet()) {
PropertyKey propertyKey = graphSchema.getPropertyKey(entry.getKey());
buffer.writeVInt(propertyKey.id().intValue());
buffer.writeProperty(propertyKey.dataType(),entry.getValue());
}
array = buffer.bytes();
} else if ( e.type() == "edge" ){
} else if (e.type() == "edge") {
int propsCount = e.properties().size();
BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount);
buffer.writeVInt(propsCount);
for(Map.Entry<String, Object> entry : e.properties().entrySet()){
for (Map.Entry<String, Object> entry : e.properties().entrySet()) {
PropertyKey propertyKey = graphSchema.getPropertyKey(entry.getKey());
buffer.writeVInt(propertyKey.id().intValue());
buffer.writeProperty(propertyKey.dataType(),entry.getValue());
@@ -110,15 +109,15 @@
return partition > 0 ? partition : (short) -partition;
}

public int getEdgeLogicPartitions(){
public int getEdgeLogicPartitions() {
return this.edgeLogicPartitions;
}

public int getVertexLogicPartitions(){
public int getVertexLogicPartitions() {
return this.vertexLogicPartitions;
}

public void close(){
public void close() {
this.client.close();
}
}
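A side note on the surrounding code, which this PR does not change: e.type() == "vertex" compares String references, so it only holds if type() always returns the same interned literal. A more defensive sketch, assuming value equality is what is meant, would be:

// Editorial sketch, not part of the PR: compare element types by value rather than by reference
if ("vertex".equals(e.type()) && e.id() != null) {
    // ... vertex key encoding as above ...
} else if ("edge".equals(e.type())) {
    // ... edge key encoding as above ...
}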

@@ -57,7 +57,6 @@

SchemaManager schema = client.schema();


schema.propertyKey("name").asText().ifNotExist().create();
schema.propertyKey("age").asInt().ifNotExist().create();
schema.propertyKey("lang").asText().ifNotExist().create();
@@ -128,15 +127,14 @@
.property("date", "2017-03-24");

List<Vertex> vertices = new ArrayList<Vertex>() {{
add(peter);
add(lop);
add(vadasB);
}};

add(peter);
add(lop);
add(vadasB);
}};

Codecov (codecov/patch) warning: added lines hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java#L130-L133 were not covered by tests.
Reviewer comment (Contributor): can we keep the origin style?


List<Edge> edges = new ArrayList<Edge>() {{
add(peterCreateLop);
}};
add(peterCreateLop);
}};

Codecov (codecov/patch) warning: added lines hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java#L136-L137 were not covered by tests.
Reviewer comment (Contributor): can we keep the origin style?
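For context on the style question: both lists use double-brace initialization (an anonymous ArrayList subclass with an instance initializer), which is what makes the continuation indentation debatable. A hedged alternative that sidesteps the idiom entirely, assuming the lists only need to remain mutable, would be:

// Editorial sketch, not part of the PR; needs java.util.Arrays and java.util.Collections imports
List<Vertex> vertices = new ArrayList<>(Arrays.asList(peter, lop, vadasB));
List<Edge> edges = new ArrayList<>(Collections.singletonList(peterCreateLop));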


// Old way: encode to json then send to server
if (bypassServer) {
@@ -188,7 +186,6 @@
return flag;
}


boolean put(String type, byte[] rowkey, byte[] values) throws IOException {
// TODO: put to HBase
return true;

@@ -30,19 +30,19 @@ public abstract class IdGenerator {

public static final Id ZERO = IdGenerator.of(0L);

public final static Id of(String id) {
public static final Id of(String id) {
return new StringId(id);
}

public final static Id of(UUID id) {
public static final Id of(UUID id) {
return new UuidId(id);
}

public final static Id of(String id, boolean uuid) {
public static final Id of(String id, boolean uuid) {
return uuid ? new UuidId(id) : new StringId(id);
}

public final static Id of(long id) {
public static final Id of(long id) {
return new LongId(id);
}

@@ -59,7 +59,7 @@ public static Id of(Object id) {
return new ObjectId(id);
}

public final static Id of(byte[] bytes, Id.IdType type) {
public static final Id of(byte[] bytes, Id.IdType type) {
switch (type) {
case LONG:
return new LongId(bytes);
@@ -72,7 +72,7 @@ public final static Id of(byte[] bytes, Id.IdType type) {
}
}

public final static Id ofStoredString(String id, Id.IdType type) {
public static final Id ofStoredString(String id, Id.IdType type) {
switch (type) {
case LONG:
return of(LongEncoding.decodeSignedB64(id));
@@ -86,7 +86,7 @@ public final static Id ofStoredString(String id, Id.IdType type) {
}
}

public final static String asStoredString(Id id) {
public static final String asStoredString(Id id) {
switch (id.type()) {
case LONG:
return LongEncoding.encodeSignedB64(id.asLong());
@@ -99,7 +99,7 @@ public final static String asStoredString(Id id) {
}
}

public final static Id.IdType idType(Id id) {
public static final Id.IdType idType(Id id) {
if (id instanceof LongId) {
return Id.IdType.LONG;
}
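The IdGenerator hunks only reorder modifiers: the check plugin (presumably a checkstyle ModifierOrder rule) follows the order the Java Language Specification recommends, which puts static before final. A one-method sketch of the convention:

// Customary JLS order: annotations, public/protected/private, abstract, static, final, ...
public static final Id of(long id) {   // rather than "public final static"
    return new LongId(id);
}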

14 changes: 14 additions & 0 deletions hugegraph-dist/pom.xml
@@ -1,4 +1,18 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

@@ -23,6 +23,8 @@

import org.apache.hugegraph.exception.ExternalException;
import org.apache.hugegraph.options.HubbleOptions;
import org.apache.hugegraph.util.Log;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.context.annotation.Bean;
@@ -33,6 +35,8 @@
@Configuration
public class HubbleConfig {

protected static final Logger LOG = Log.logger(HubbleConfig.class);
Reviewer comment (Contributor): ditto (the same "mark as private?" question as above)


@Autowired
private ApplicationArguments arguments;

@@ -57,6 +61,7 @@ public HugeConfig hugeConfig() {
conf = path;
}
} catch (Exception ignored) {
LOG.error("hugeConfig exception");
Reviewer comment (Member): same question

}
return new HugeConfig(conf);
}

@@ -245,7 +245,7 @@ public LoadTask retry(@PathVariable("connId") int connId,
return this.service.retry(taskId);
} finally {
jobEntity.setJobStatus(JobStatus.LOADING);
jobEntity.setUpdateTime( HubbleUtil.nowDate());
jobEntity.setUpdateTime(HubbleUtil.nowDate());
this.jobService.update(jobEntity);
}
}

@@ -19,8 +19,6 @@

package org.apache.hugegraph.license;

import org.apache.hugegraph.license.MachineInfo;

public final class ServerInfo {

private final String serverId;

@@ -436,7 +436,8 @@ private FileSource buildFileSource(FileMapping fileMapping) {
Ex.check(idFields.size() == 1,
"When the ID strategy is CUSTOMIZED, you must " +
"select a column in the file as the id");
vMapping = new org.apache.hugegraph.loader.mapping.VertexMapping(idFields.get(0), true);
vMapping = new org.apache.hugegraph.loader.mapping.VertexMapping(
idFields.get(0), true);
Comment on lines +439 to +440 (Member): if line < 100, use one line instead

Author reply: if on one line, it is 105 chars (a screenshot was attached).
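The length comes from the fully qualified class name. If no other VertexMapping is already imported in this file (an assumption; the qualified name suggests there may be a clash), importing the loader's class would keep the statement on one line under the limit:

// Hypothetical rewrite, assuming org.apache.hugegraph.loader.mapping.VertexMapping can be
// imported without colliding with another VertexMapping used in this file
vMapping = new VertexMapping(idFields.get(0), true);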

} else {
assert vl.getIdStrategy().isPrimaryKey();
List<String> primaryKeys = vl.getPrimaryKeys();

@@ -93,7 +93,6 @@ public IPage<ExecuteHistory> list(int connId, long current, long pageSize) {
return results;
}


public ExecuteHistory get(int connId, int id) {
HugeClient client = this.getClient(connId);
ExecuteHistory history = this.mapper.selectById(id);
@@ -112,9 +111,9 @@

@Transactional(isolation = Isolation.READ_COMMITTED)
public void save(ExecuteHistory history) {
if (this.mapper.insert(history) != 1) {
throw new InternalException("entity.insert.failed", history);
}
if (this.mapper.insert(history) != 1) {
throw new InternalException("entity.insert.failed", history);
}
}

@Transactional(isolation = Isolation.READ_COMMITTED)

@@ -45,17 +45,17 @@ public final class GremlinUtil {
);

private static final String[] COMPILE_SEARCH_LIST = new String[]{
".", "(", ")"
".", "(", ")"
};
private static final String[] COMPILE_TARGET_LIST = new String[]{
"\\.", "\\(", "\\)"
"\\.", "\\(", "\\)"
};

private static final String[] ESCAPE_SEARCH_LIST = new String[]{
"\\", "\"", "'", "\n"
"\\", "\"", "'", "\n"
};
private static final String[] ESCAPE_TARGET_LIST = new String[]{
"\\\\", "\\\"", "\\'", "\\n"
"\\\\", "\\\"", "\\'", "\\n"
};

private static final Set<Pattern> LIMIT_PATTERNS = compile(LIMIT_SUFFIXES);

@@ -26,7 +26,6 @@
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import org.apache.hugegraph.util.TimeUtil;

public final class SerializeUtil {


@@ -183,18 +183,18 @@
Map<String, UpdateStrategy> updateStrategyMap =
elementMapping.updateStrategies();
if (isVertex) {
BatchVertexRequest.Builder req = new BatchVertexRequest.Builder();
req.vertices((List<Vertex>) (Object) graphElements)
.updatingStrategies(updateStrategyMap)
.createIfNotExist(true);
g.updateVertices(req.build());
BatchVertexRequest.Builder request = new BatchVertexRequest.Builder();
request.vertices((List<Vertex>) (Object) graphElements)
.updatingStrategies(updateStrategyMap)
Reviewer comment (Contributor): align with '.'

.createIfNotExist(true);
g.updateVertices(request.build());

Codecov (codecov/patch) warning: added lines hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java#L186-L190 were not covered by tests.
} else {
BatchEdgeRequest.Builder req = new BatchEdgeRequest.Builder();
req.edges((List<Edge>) (Object) graphElements)
.updatingStrategies(updateStrategyMap)
.checkVertex(this.loadOptions.checkVertex)
.createIfNotExist(true);
g.updateEdges(req.build());
BatchEdgeRequest.Builder request = new BatchEdgeRequest.Builder();
request.edges((List<Edge>) (Object) graphElements)
.updatingStrategies(updateStrategyMap)
.checkVertex(this.loadOptions.checkVertex)
.createIfNotExist(true);
g.updateEdges(request.build());

Codecov (codecov/patch) warning: added lines hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java#L192-L197 were not covered by tests.
}
break;
case DELETE:
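A sketch of the alignment the reviewer asks for above, with the continuation lines of the fluent chain lined up under the first '.' (the exact column is illustrative; the project's formatter settings decide it):

// Illustrative only: align the chained builder calls on '.'
BatchVertexRequest.Builder request = new BatchVertexRequest.Builder();
request.vertices((List<Vertex>) (Object) graphElements)
       .updatingStrategies(updateStrategyMap)
       .createIfNotExist(true);
g.updateVertices(request.build());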

@@ -20,7 +20,6 @@
package org.apache.hugegraph.loader.reader.file;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;

@@ -55,15 +55,15 @@ public void rawLine(String rawLine) {
this.rawLine = rawLine;
}

public final String[] names() {
public String[] names() {
return this.names;
}

public void names(String[] names) {
this.names = names;
}

public final Object[] values() {
public Object[] values() {
return this.values;
}
