
Commit

fix: trx trace callvalue bug, suppress info level logs in container
soad003 committed Feb 12, 2024
1 parent 8b67cfc commit d3bd0df
Showing 8 changed files with 27 additions and 10 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -3,7 +3,10 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [Unreleased]
## [24.01rc1]
### Fixed
- excessive logging in container
- tron.trace callvalue overflow int -> bigint
### Changed
- Upgrade to Spark 3.4.2
- Upgrade DataStax Spark Cassandra connector to 3.4.1
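For context on the callvalue entry above: Tron denominates call values in sun (1 TRX = 1,000,000 sun), so a signed 32-bit Int caps out at roughly 2,147 TRX. A quick plain-Scala sanity check (illustration only, not code from the repository):

// Int.MaxValue is 2,147,483,647 sun; expressed in TRX (1 TRX = 1,000,000 sun)
// that is only about 2,147 TRX, so larger call values overflow an Int.
val maxCallValueInTrx = BigInt(Int.MaxValue) / 1000000  // ≈ 2147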
1 change: 1 addition & 0 deletions Dockerfile
@@ -52,6 +52,7 @@ RUN sbt package && \
cp target/scala-2.12/graphsense-spark*.jar graphsense-spark.jar

ADD docker/ .
RUN mv log4j2.properties /opt/spark/conf && cp /opt/spark/conf/log4j2.properties /opt/spark/conf/log4j.properties && cp /opt/spark/conf/log4j2.properties /opt/spark/conf/log4j2.default

USER dockeruser

4 changes: 2 additions & 2 deletions Makefile
@@ -1,4 +1,4 @@
RELEASE := 'v24.02.beta1'
RELEASE := 'v24.02rc1'
# RELEASESEM := 'v1.6.2'

all: format lint build
@@ -66,4 +66,4 @@ tag-version:
git diff --exit-code && git diff --staged --exit-code && git tag -a $(RELEASE) -m 'Release $(RELEASE)' || (echo "Repo is dirty please commit first" && exit 1)


.PHONY: all test lint format build tag-version start-local-cassandra stop-local-cassandra run-local-transform build-docker test-account test-utxo test-common
.PHONY: all test lint format build tag-version start-local-cassandra stop-local-cassandra run-local-transform build-docker test-account test-utxo test-common
12 changes: 12 additions & 0 deletions docker/log4j2.properties
@@ -0,0 +1,12 @@
# Set root logger level to desired log level (DEBUG, INFO, WARN, ERROR, or FATAL)
log4j.rootLogger=ERROR, console

# Define console appender
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

# Set log level for specific packages or classes
# For example, to set the log level to WARN for the org.apache.spark package:
log4j.logger.org.apache.spark=WARN
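
A side note, not part of the commit: the keys above use the Log4j 1.x properties format, while Spark 3.x reads conf/log4j2.properties in the Log4j 2 format. If the intent is for Log4j 2 to parse the file directly, an equivalent in that syntax might look like the following sketch (log levels and pattern carried over from the file above; this is an assumption, not the project's actual configuration):

# Log4j 2 properties syntax (sketch only); same intent as the file above.
rootLogger.level = error
rootLogger.appenderRef.console.ref = console

appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n

# Quieter Spark internals, mirroring the package-level override above.
logger.spark.name = org.apache.spark
logger.spark.level = warn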
2 changes: 1 addition & 1 deletion src/main/scala/org/graphsense/account/trx/Model.scala
@@ -9,7 +9,7 @@ case class Trace(
transfertoAddress: Option[Array[Byte]],
// callInfoIndex: Int,
callTokenId: Option[Int],
callValue: Int,
callValue: BigInt,
note: String,
rejected: Boolean,
txHash: Option[Array[Byte]]
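A minimal sketch (not from the repository) of how the widened callValue field behaves in a Spark Dataset. TraceLite is a hypothetical stand-in for the Trace case class above; Spark's encoders represent a BigInt field as decimal(38,0), which easily holds values that overflow a 32-bit Int:

import org.apache.spark.sql.SparkSession

// Hypothetical, trimmed-down stand-in for the Trace model above.
case class TraceLite(callValue: BigInt, rejected: Boolean)

object CallValueSketch extends App {
  val spark = SparkSession.builder().master("local[*]").getOrCreate()
  import spark.implicits._

  // A call value of ~9e12 sun (9 million TRX) does not fit into an Int,
  // but round-trips fine as a BigInt field, encoded as decimal(38,0).
  val ds = Seq(TraceLite(BigInt("9000000000000"), rejected = false)).toDS()
  ds.printSchema()
}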
@@ -61,6 +61,10 @@ class TrxTransformation(spark: SparkSession, bucketSize: Int) {
ds.filter(isTrxTrace && isSuccessfulTrace && isCallTrace)
}

def onlySuccessfulTrxTraces[T](ds: Dataset[T]): Dataset[T] = {
ds.filter(isTrxTrace && isSuccessfulTrace)
}

def joinAddressIds[T](
addressIds: Dataset[AddressId],
addressCol: String = "address"
@@ -106,7 +110,6 @@ class TrxTransformation(spark: SparkSession, bucketSize: Int) {
ethTransform.computeExchangeRates(blocks, exchangeRates)
}

// TODO: remove old compute with balances if this is right.
def computeBalancesWithFeesTable(
transactions: Dataset[Transaction],
txFees: Dataset[TxFee],
@@ -622,7 +625,7 @@ class TrxTransformation(spark: SparkSession, bucketSize: Int) {
.transform(removeUnknownRecipientTxs)

val trcs = traces
.transform(onlySuccessfulTrxCallTraces)
.transform(onlySuccessfulTrxTraces)
.filter($"txHash".isNotNull)

val txsEncodedtemp = txs
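To illustrate the switch above from onlySuccessfulTrxCallTraces to onlySuccessfulTrxTraces: dropping the call-trace restriction lets successful non-call traces that carry a txHash into the downstream joins, which is consistent with the higher expected address-relation count in the test changes below. The column semantics in this toy are assumptions for illustration, not the project's real predicates:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object TraceFilterToy extends App {
  val spark = SparkSession.builder().master("local[*]").getOrCreate()
  import spark.implicits._

  // Toy trace rows (txHash, rejected, note); schema assumed for illustration.
  val traces = Seq(
    ("0xaa", false, "call"),    // successful call trace
    ("0xbb", false, "create"),  // successful non-call trace
    ("0xcc", true, "call")      // rejected trace
  ).toDF("txHash", "rejected", "note")

  val successful = traces.filter(!col("rejected"))

  // Old input: successful call traces only -> 1 row.
  println(successful.filter(col("note") === "call").count())
  // New input: all successful traces that carry a txHash -> 2 rows.
  println(successful.filter(col("txHash").isNotNull).count())
}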
@@ -208,8 +208,6 @@ class TransformationTest extends TestBase {

note("Test blocks")

blockTransactions.show(100)
blockTransactionsRef.show(100)
test("Block transactions") {
assertDataFrameEquality(blockTransactions, blockTransactionsRef)
}
@@ -363,8 +363,8 @@ class TransformationTest extends TestBase {
)
assert(data.output.addressIds.count() == 50527, "expected 50527 addresses")
assert(
data.output.addressRelations.count() == 77541,
"expected 77541 address relations"
data.output.addressRelations.count() == 77633,
"expected 77633 address relations"
)

assert(
