From d2cc6b5fa39d33ef79952d236c1a8eeb3884e026 Mon Sep 17 00:00:00 2001
From: Yu Zhang
Date: Wed, 9 Oct 2024 09:08:50 -0700
Subject: [PATCH] fix(query): the schema provided by _type_ does not match
 colIDs in the data.

---
 core/src/main/scala/filodb.core/metadata/Schemas.scala | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/filodb.core/metadata/Schemas.scala b/core/src/main/scala/filodb.core/metadata/Schemas.scala
index f486f620e1..f15b701ce5 100644
--- a/core/src/main/scala/filodb.core/metadata/Schemas.scala
+++ b/core/src/main/scala/filodb.core/metadata/Schemas.scala
@@ -308,7 +308,8 @@ final case class Schemas(part: PartitionSchema,
     val numSamplesPerChunk = chunkDurationMillis / resolutionMs
     // find number of chunks to be scanned. Ceil division needed here
     val numChunksPerTs = (queryDurationMs + chunkDurationMillis - 1) / chunkDurationMillis
-    val bytesPerSample = colIds.map(c => bytesPerSampleSwag((schemaId, c))).sum
+    // The schema provided does not match an existing one; give the sample the weight of a histogram.
+    val bytesPerSample = colIds.map(c => bytesPerSampleSwag.getOrElse((schemaId, c), 20.0)).sum
     val estDataSize = bytesPerSample * numTsPartitions * numSamplesPerChunk * numChunksPerTs
     estDataSize
   }
@@ -327,7 +328,8 @@ final case class Schemas(part: PartitionSchema,
                      chunkMethod: ChunkScanMethod
                     ): Double = {
     val numSamplesPerChunk = chunkDurationMillis / resolutionMs
-    val bytesPerSample = colIds.map(c => bytesPerSampleSwag((schemaId, c))).sum
+    // The schema provided does not match an existing one; give the sample the weight of a histogram.
+    val bytesPerSample = colIds.map(c => bytesPerSampleSwag.getOrElse((schemaId, c), 20.0)).sum
     var estDataSize = 0d
     pkRecs.foreach { pkRec =>
       val intersection = Math.min(chunkMethod.endTime, pkRec.endTime) - Math.max(chunkMethod.startTime, pkRec.startTime)
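
Note (illustrative, not part of the patch): the change replaces a direct Map apply, which throws NoSuchElementException when a (schemaId, colId) key is absent from bytesPerSampleSwag, with Map.getOrElse and a 20.0-bytes-per-sample fallback (the histogram-sized weight mentioned in the comment). The self-contained Scala sketch below uses made-up schema/column IDs and per-column sizes to show the before/after behavior; none of these values come from FiloDB itself.

object BytesPerSampleFallbackSketch extends App {
  // Hypothetical stand-in for Schemas.bytesPerSampleSwag: (schemaId, colId) -> average bytes per sample.
  val bytesPerSampleSwag: Map[(Int, Int), Double] = Map(
    (1, 0) -> 4.0,
    (1, 1) -> 8.0
  )

  val knownSchemaId   = 1
  val unknownSchemaId = 2   // a schema hash with no entry in the map
  val colIds          = Seq(0, 1)

  // Old behavior: bytesPerSampleSwag((unknownSchemaId, c)) would throw NoSuchElementException.
  // New behavior: missing (schemaId, colId) pairs fall back to 20.0 bytes per sample.
  def bytesPerSample(schemaId: Int): Double =
    colIds.map(c => bytesPerSampleSwag.getOrElse((schemaId, c), 20.0)).sum

  println(bytesPerSample(knownSchemaId))   // 12.0 -- sums the known per-column weights
  println(bytesPerSample(unknownSchemaId)) // 40.0 -- two columns at the 20.0 fallback
}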