From 262918be31616d0e7548855bd8d172d201af5a44 Mon Sep 17 00:00:00 2001 From: Alex Theimer Date: Fri, 23 Feb 2024 13:48:19 -0800 Subject: [PATCH] renames for clarity --- .../scala/filodb/downsampler/chunk/DownsamplerMain.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spark-jobs/src/main/scala/filodb/downsampler/chunk/DownsamplerMain.scala b/spark-jobs/src/main/scala/filodb/downsampler/chunk/DownsamplerMain.scala index 3f8376d3b2..c64a47b63c 100644 --- a/spark-jobs/src/main/scala/filodb/downsampler/chunk/DownsamplerMain.scala +++ b/spark-jobs/src/main/scala/filodb/downsampler/chunk/DownsamplerMain.scala @@ -203,12 +203,12 @@ class Downsampler(settings: DownsamplerSettings) extends Serializable { val exportKeyToRules = settings.exportKeyToRules.map(f => (f._1, f._2)).toSeq val exportTasks = { // downsample the data as the first key is exported - val headTask = Seq(() => + val firstExportTaskWithDs = Seq(() => exportForKey(rddWithDs, exportKeyToRules.head._1, exportKeyToRules.head._2, batchExporter, spark)) // export all remaining keys without the downsample step - val tailTasks = exportKeyToRules.tail.map{spec => () => + val remainingExportTasksWithoutDs = exportKeyToRules.tail.map{spec => () => exportForKey(rdd, spec._1, spec._2, batchExporter, spark)} - headTask ++ tailTasks + firstExportTaskWithDs ++ remainingExportTasksWithoutDs } // export/downsample RDDs in parallel exportTasks.par.foreach(_.apply())