diff --git a/RELEASE.md b/RELEASE.md
index 70bad5b3..dbc3895f 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -1,5 +1,6 @@
 # Release Notes
 
-## [4.3.11] - 2024-TBD
+## [4.4.0] - 2024-09-19
+- Added properties `spark.cdm.connect.origin.tls.isAstra` and `spark.cdm.connect.target.tls.isAstra` to allow connecting to Astra DB without using an [SCB](https://docs.datastax.com/en/astra-db-serverless/drivers/secure-connect-bundle.html). This may be needed by enterprises that consider credentials packaged within an SCB a security risk. TLS properties can now be passed as parameters, or wrapper scripts (not included) can be used to pull sensitive credentials from a vault service in real time and pass them to CDM.
 - Switched to using Apache Cassandra® `5.0` docker image for testing
 - Introduces smoke testing of `vector` CQL data type
diff --git a/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java b/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java
index 151ce7d2..4280183f 100644
--- a/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java
+++ b/src/main/java/com/datastax/cdm/cql/statement/BaseCdmStatement.java
@@ -21,7 +21,7 @@
 import com.datastax.cdm.cql.EnhancedSession;
 import com.datastax.cdm.properties.IPropertyHelper;
 import com.datastax.cdm.schema.CqlTable;
-import com.datastax.oss.driver.api.core.cql.*;
+import com.datastax.oss.driver.api.core.cql.PreparedStatement;
 
 public class BaseCdmStatement {
diff --git a/src/main/java/com/datastax/cdm/data/CqlConversion.java b/src/main/java/com/datastax/cdm/data/CqlConversion.java
index fc52c458..46e4c125 100644
--- a/src/main/java/com/datastax/cdm/data/CqlConversion.java
+++ b/src/main/java/com/datastax/cdm/data/CqlConversion.java
@@ -19,7 +19,6 @@
 import java.util.*;
 import java.util.stream.Collectors;
 
-import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/src/main/java/com/datastax/cdm/data/DataUtility.java b/src/main/java/com/datastax/cdm/data/DataUtility.java
index 7ce2cbd4..76d6d003 100644
--- a/src/main/java/com/datastax/cdm/data/DataUtility.java
+++ b/src/main/java/com/datastax/cdm/data/DataUtility.java
@@ -15,7 +15,13 @@
  */
 package com.datastax.cdm.data;
 
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
 import java.util.*;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -26,6 +32,7 @@ public class DataUtility {
     public static final Logger logger = LoggerFactory.getLogger(CqlConversion.class);
 
+    protected static final String SCB_FILE_NAME = "_temp_cdm_scb_do_not_touch.zip";
 
     public static boolean diff(Object obj1, Object obj2) {
         if (obj1 == null && obj2 == null) {
@@ -143,4 +150,68 @@ public static String getMyClassMethodLine(Exception e) {
 
         return "Unknown";
     }
+
+    public static void deleteGeneratedSCB() {
+        File file = new File(PKFactory.Side.ORIGIN + SCB_FILE_NAME);
+        if (file.exists()) {
+            file.delete();
+        }
+        file = new File(PKFactory.Side.TARGET + SCB_FILE_NAME);
+        if (file.exists()) {
+            file.delete();
+        }
+    }
+
+    public static File generateSCB(String host, String port, String trustStorePassword, String trustStorePath,
+            String keyStorePassword, String keyStorePath, PKFactory.Side side) throws IOException {
+        FileOutputStream fileOutputStream = new FileOutputStream("config.json");
+        String scbJson = new StringBuilder("{\"host\": \"").append(host).append("\", \"port\": ").append(port)
\"keyStoreLocation\": \"./identity.jks\", \"keyStorePassword\": \"").append(keyStorePassword) + .append("\", \"trustStoreLocation\": \"./trustStore.jks\", \"trustStorePassword\": \"") + .append(trustStorePassword).append("\"}").toString(); + fileOutputStream.write(scbJson.getBytes()); + fileOutputStream.close(); + File configFile = new File("config.json"); + FilePathAndNewName configFileWithName = new FilePathAndNewName(configFile, "config.json"); + FilePathAndNewName keyFileWithName = new FilePathAndNewName(new File(keyStorePath), "identity.jks"); + FilePathAndNewName trustFileWithName = new FilePathAndNewName(new File(trustStorePath), "trustStore.jks"); + File zipFile = zip(Arrays.asList(configFileWithName, keyFileWithName, trustFileWithName), side + SCB_FILE_NAME); + configFile.delete(); + + return zipFile; + } + + private static File zip(List files, String filename) { + File zipfile = new File(filename); + byte[] buf = new byte[1024]; + try { + ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipfile)); + for (int i = 0; i < files.size(); i++) { + out.putNextEntry(new ZipEntry(files.get(i).name)); + FileInputStream in = new FileInputStream(files.get(i).file.getCanonicalPath()); + int len; + while ((len = in.read(buf)) > 0) { + out.write(buf, 0, len); + } + out.closeEntry(); + in.close(); + } + out.close(); + + return zipfile; + } catch (IOException ex) { + logger.error("Unable to write out zip file: {}. Got exception: {}", filename, ex.getMessage()); + } + return null; + } + + static class FilePathAndNewName { + File file; + String name; + + public FilePathAndNewName(File file, String name) { + this.file = file; + this.name = name; + } + } } diff --git a/src/main/java/com/datastax/cdm/job/AbstractJobSession.java b/src/main/java/com/datastax/cdm/job/AbstractJobSession.java index 8d783ce2..5c718547 100644 --- a/src/main/java/com/datastax/cdm/job/AbstractJobSession.java +++ b/src/main/java/com/datastax/cdm/job/AbstractJobSession.java @@ -22,6 +22,7 @@ import org.slf4j.LoggerFactory; import com.datastax.cdm.cql.EnhancedSession; +import com.datastax.cdm.data.DataUtility; import com.datastax.cdm.data.PKFactory; import com.datastax.cdm.feature.Feature; import com.datastax.cdm.feature.Featureset; @@ -114,6 +115,7 @@ public synchronized void initCdmRun(long runId, long prevRunId, Collection { diff --git a/src/main/java/com/datastax/cdm/properties/KnownProperties.java b/src/main/java/com/datastax/cdm/properties/KnownProperties.java index c3bdd05d..b35d83dd 100644 --- a/src/main/java/com/datastax/cdm/properties/KnownProperties.java +++ b/src/main/java/com/datastax/cdm/properties/KnownProperties.java @@ -279,6 +279,7 @@ public enum PropertyType { public static final String ORIGIN_TLS_KEYSTORE_PATH = "spark.cdm.connect.origin.tls.keyStore.path"; public static final String ORIGIN_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.origin.tls.keyStore.password"; public static final String ORIGIN_TLS_ALGORITHMS = "spark.cdm.connect.origin.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA + public static final String ORIGIN_TLS_IS_ASTRA = "spark.cdm.connect.origin.tls.isAstra"; static { types.put(ORIGIN_TLS_ENABLED, PropertyType.BOOLEAN); defaults.put(ORIGIN_TLS_ENABLED, "false"); @@ -290,6 +291,8 @@ public enum PropertyType { types.put(ORIGIN_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); types.put(ORIGIN_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark defaults.put(ORIGIN_TLS_ALGORITHMS, 
"TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); + types.put(ORIGIN_TLS_IS_ASTRA, PropertyType.BOOLEAN); + defaults.put(ORIGIN_TLS_IS_ASTRA, "false"); } // ========================================================================== @@ -302,6 +305,7 @@ public enum PropertyType { public static final String TARGET_TLS_KEYSTORE_PATH = "spark.cdm.connect.target.tls.keyStore.path"; public static final String TARGET_TLS_KEYSTORE_PASSWORD = "spark.cdm.connect.target.tls.keyStore.password"; public static final String TARGET_TLS_ALGORITHMS = "spark.cdm.connect.target.tls.enabledAlgorithms"; // TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA + public static final String TARGET_TLS_IS_ASTRA = "spark.cdm.connect.target.tls.isAstra"; static { types.put(TARGET_TLS_ENABLED, PropertyType.BOOLEAN); defaults.put(TARGET_TLS_ENABLED, "false"); @@ -313,6 +317,8 @@ public enum PropertyType { types.put(TARGET_TLS_KEYSTORE_PASSWORD, PropertyType.STRING); types.put(TARGET_TLS_ALGORITHMS, PropertyType.STRING); // This is a list but it is handled by Spark defaults.put(TARGET_TLS_ALGORITHMS, "TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA"); + types.put(TARGET_TLS_IS_ASTRA, PropertyType.BOOLEAN); + defaults.put(TARGET_TLS_IS_ASTRA, "false"); } // ========================================================================== diff --git a/src/main/scala/com/datastax/cdm/job/BaseJob.scala b/src/main/scala/com/datastax/cdm/job/BaseJob.scala index 0cb3a360..29162e25 100644 --- a/src/main/scala/com/datastax/cdm/job/BaseJob.scala +++ b/src/main/scala/com/datastax/cdm/job/BaseJob.scala @@ -21,6 +21,7 @@ import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.rdd.RDD import org.apache.spark.sql.SparkSession import org.slf4j.LoggerFactory +import com.datastax.cdm.data.PKFactory.Side import java.math.BigInteger import java.util @@ -70,8 +71,8 @@ abstract class BaseJob[T: ClassTag] extends App { consistencyLevel = propertyHelper.getString(KnownProperties.READ_CL) val connectionFetcher = new ConnectionFetcher(sContext, propertyHelper) - originConnection = connectionFetcher.getConnection("ORIGIN", consistencyLevel) - targetConnection = connectionFetcher.getConnection("TARGET", consistencyLevel) + originConnection = connectionFetcher.getConnection(Side.ORIGIN, consistencyLevel) + targetConnection = connectionFetcher.getConnection(Side.TARGET, consistencyLevel) val hasRandomPartitioner: Boolean = { val partitionerName = originConnection.withSessionDo(_.getMetadata.getTokenMap.get().getPartitionerName) diff --git a/src/main/scala/com/datastax/cdm/job/ConnectionDetails.scala b/src/main/scala/com/datastax/cdm/job/ConnectionDetails.scala index 3d987b7d..bef8f664 100644 --- a/src/main/scala/com/datastax/cdm/job/ConnectionDetails.scala +++ b/src/main/scala/com/datastax/cdm/job/ConnectionDetails.scala @@ -27,5 +27,6 @@ case class ConnectionDetails( trustStoreType: String, keyStorePath: String, keyStorePassword: String, - enabledAlgorithms: String + enabledAlgorithms: String, + isAstra: Boolean ) \ No newline at end of file diff --git a/src/main/scala/com/datastax/cdm/job/ConnectionFetcher.scala b/src/main/scala/com/datastax/cdm/job/ConnectionFetcher.scala index 9a17b100..419fec1d 100644 --- a/src/main/scala/com/datastax/cdm/job/ConnectionFetcher.scala +++ b/src/main/scala/com/datastax/cdm/job/ConnectionFetcher.scala @@ -15,17 +15,19 @@ */ package com.datastax.cdm.job -import com.datastax.cdm.properties.{KnownProperties, PropertyHelper} +import com.datastax.cdm.properties.{KnownProperties, 
+import com.datastax.cdm.properties.{KnownProperties, IPropertyHelper}
 import com.datastax.spark.connector.cql.CassandraConnector
 import org.apache.spark.{SparkConf, SparkContext}
 import org.slf4j.{Logger, LoggerFactory}
+import com.datastax.cdm.data.DataUtility.generateSCB
+import com.datastax.cdm.data.PKFactory.Side
 
 // TODO: CDM-31 - add localDC configuration support
-class ConnectionFetcher(sparkContext: SparkContext, propertyHelper: PropertyHelper) {
+class ConnectionFetcher(sparkContext: SparkContext, propertyHelper: IPropertyHelper) {
   val logger: Logger = LoggerFactory.getLogger(this.getClass.getName)
 
-  def getConnectionDetails(side: String): ConnectionDetails = {
-    if ("ORIGIN".equals(side.toUpperCase)) {
+  def getConnectionDetails(side: Side): ConnectionDetails = {
+    if (Side.ORIGIN.equals(side)) {
       ConnectionDetails(
         propertyHelper.getAsString(KnownProperties.CONNECT_ORIGIN_SCB),
         propertyHelper.getAsString(KnownProperties.CONNECT_ORIGIN_HOST),
@@ -35,10 +37,11 @@ class ConnectionFetcher(sparkContext: SparkContext, propertyHelp
         propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_ENABLED),
         propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_TRUSTSTORE_PATH),
         propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_TRUSTSTORE_PASSWORD),
-        propertyHelper.getString(KnownProperties.ORIGIN_TLS_TRUSTSTORE_TYPE),
+        propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_TRUSTSTORE_TYPE),
         propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_KEYSTORE_PATH),
         propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_KEYSTORE_PASSWORD),
-        propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_ALGORITHMS)
+        propertyHelper.getAsString(KnownProperties.ORIGIN_TLS_ALGORITHMS),
+        propertyHelper.getBoolean(KnownProperties.ORIGIN_TLS_IS_ASTRA)
       )
     }
     else {
@@ -51,15 +54,16 @@ class ConnectionFetcher(sparkContext: SparkContext, propertyHelp
         propertyHelper.getAsString(KnownProperties.TARGET_TLS_ENABLED),
         propertyHelper.getAsString(KnownProperties.TARGET_TLS_TRUSTSTORE_PATH),
         propertyHelper.getAsString(KnownProperties.TARGET_TLS_TRUSTSTORE_PASSWORD),
-        propertyHelper.getString(KnownProperties.TARGET_TLS_TRUSTSTORE_TYPE),
+        propertyHelper.getAsString(KnownProperties.TARGET_TLS_TRUSTSTORE_TYPE),
         propertyHelper.getAsString(KnownProperties.TARGET_TLS_KEYSTORE_PATH),
         propertyHelper.getAsString(KnownProperties.TARGET_TLS_KEYSTORE_PASSWORD),
-        propertyHelper.getAsString(KnownProperties.TARGET_TLS_ALGORITHMS)
+        propertyHelper.getAsString(KnownProperties.TARGET_TLS_ALGORITHMS),
+        propertyHelper.getBoolean(KnownProperties.TARGET_TLS_IS_ASTRA)
       )
     }
   }
 
-  def getConnection(side: String, consistencyLevel: String): CassandraConnector = {
+  def getConnection(side: Side, consistencyLevel: String): CassandraConnector = {
     val connectionDetails = getConnectionDetails(side)
     val config: SparkConf = sparkContext.getConf
 
@@ -72,6 +76,17 @@ class ConnectionFetcher(sparkContext: SparkContext, propertyHelp
         .set("spark.cassandra.auth.username", connectionDetails.username)
         .set("spark.cassandra.auth.password", connectionDetails.password)
         .set("spark.cassandra.input.consistency.level", consistencyLevel)
         .set("spark.cassandra.connection.config.cloud.path", connectionDetails.scbPath))
+    } else if (connectionDetails.trustStorePath.nonEmpty && connectionDetails.isAstra) {
+      logger.info("Connecting to Astra "+side+" (with truststore) using host metadata at "+connectionDetails.host+":"+connectionDetails.port);
+
+      val scbFile = generateSCB(connectionDetails.host, connectionDetails.port,
+        connectionDetails.trustStorePassword, connectionDetails.trustStorePath,
+        connectionDetails.keyStorePassword, connectionDetails.keyStorePath, side)
+      return CassandraConnector(config
+        .set("spark.cassandra.auth.username", connectionDetails.username)
+        .set("spark.cassandra.auth.password", connectionDetails.password)
+        .set("spark.cassandra.input.consistency.level", consistencyLevel)
+        .set("spark.cassandra.connection.config.cloud.path", "file://" + scbFile.getAbsolutePath()))
     } else if (connectionDetails.trustStorePath.nonEmpty) {
       logger.info("Connecting to "+side+" (with truststore) at "+connectionDetails.host+":"+connectionDetails.port);
diff --git a/src/resources/cdm-detailed.properties b/src/resources/cdm-detailed.properties
index 500f579b..715a1a6a 100644
--- a/src/resources/cdm-detailed.properties
+++ b/src/resources/cdm-detailed.properties
@@ -441,6 +441,7 @@ spark.cdm.perfops.ratelimit.target 20000
 #  .path              : Filepath to the Java keystore file
 #  .password          : Password needed to open the keystore
 #  .enabledAlgorithms : Default is TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA
+#  .isAstra           : Default is false. Set to true if connecting to DataStax Astra DB without an SCB
 #-----------------------------------------------------------------------------------------------------------
 #spark.cdm.connect.origin.tls.enabled false
 #spark.cdm.connect.origin.tls.trustStore.path
@@ -449,6 +450,7 @@ spark.cdm.perfops.ratelimit.target 20000
 #spark.cdm.connect.origin.tls.keyStore.path
 #spark.cdm.connect.origin.tls.keyStore.password
 #spark.cdm.connect.origin.tls.enabledAlgorithms TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA
+#spark.cdm.connect.origin.tls.isAstra false
 
 #spark.cdm.connect.target.tls.enabled false
 #spark.cdm.connect.target.tls.trustStore.path
@@ -457,3 +459,4 @@ spark.cdm.perfops.ratelimit.target 20000
 #spark.cdm.connect.target.tls.keyStore.path
 #spark.cdm.connect.target.tls.keyStore.password
 #spark.cdm.connect.target.tls.enabledAlgorithms TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA
+#spark.cdm.connect.target.tls.isAstra false
diff --git a/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java b/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java
index ddcc8b28..f4e42423 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/CodecFactoryTest.java
@@ -15,7 +15,9 @@
  */
 package com.datastax.cdm.cql.codec;
 
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.List;
 
@@ -23,7 +25,6 @@
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.Mock;
-import org.mockito.Mockito;
 
 import com.datastax.cdm.data.MockitoExtension;
 import com.datastax.cdm.properties.PropertyHelper;
diff --git a/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java b/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
index 160fb705..cd825bed 100644
--- a/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
+++ b/src/test/java/com/datastax/cdm/cql/codec/TEXTFormat_InstantCodecTest.java
@@ -27,7 +27,6 @@
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
 
 import com.datastax.cdm.data.CqlConversion;
 import com.datastax.cdm.properties.KnownProperties;
diff --git a/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java b/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
index 2f2d3cc6..5306c6de 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/Feature_CounterTest.java
@@ -15,8 +15,6 @@
  */
 package com.datastax.cdm.cql.statement;
 
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import java.util.Arrays;
diff --git a/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatementTest.java b/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatementTest.java
index abaf4eec..d78e8aa3 100644
--- a/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatementTest.java
+++ b/src/test/java/com/datastax/cdm/cql/statement/TargetUpsertRunDetailsStatementTest.java
@@ -15,13 +15,14 @@
  */
 package com.datastax.cdm.cql.statement;
 
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.when;
 
 import java.util.Collections;
 import java.util.List;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
 
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -34,7 +35,6 @@
 import com.datastax.oss.driver.api.core.cql.PreparedStatement;
 import com.datastax.oss.driver.api.core.cql.ResultSet;
 import com.datastax.oss.driver.api.core.cql.Row;
-import com.datastax.oss.driver.api.core.type.DataTypes;
 
 public class TargetUpsertRunDetailsStatementTest extends CommonMocks {
     @Mock
diff --git a/src/test/java/com/datastax/cdm/data/CqlConversionTest.java b/src/test/java/com/datastax/cdm/data/CqlConversionTest.java
index 9ea7876b..46b965ff 100644
--- a/src/test/java/com/datastax/cdm/data/CqlConversionTest.java
+++ b/src/test/java/com/datastax/cdm/data/CqlConversionTest.java
@@ -15,11 +15,9 @@
  */
 package com.datastax.cdm.data;
 
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.Mockito.*;
-
-import java.util.Collections;
-import java.util.List;
+import static org.junit.jupiter.api.Assertions.assertAll;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.mockito.Mockito.mock;
 
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
diff --git a/src/test/java/com/datastax/cdm/data/DataUtilityTest.java b/src/test/java/com/datastax/cdm/data/DataUtilityTest.java
index 509223c5..42938c6a 100644
--- a/src/test/java/com/datastax/cdm/data/DataUtilityTest.java
+++ b/src/test/java/com/datastax/cdm/data/DataUtilityTest.java
@@ -19,9 +19,12 @@
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
+import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -159,4 +162,28 @@ public void getMyClassMethodLineTestUnknown() {
         ex.setStackTrace(new StackTraceElement[] {});
         assertEquals("Unknown", DataUtility.getMyClassMethodLine(ex));
     }
+
+    @Test
+    public void generateSCBOrigin() throws IOException {
+        File scb = DataUtility.generateSCB("localhost", "9042", "trust123", "./pom.xml", "key123", "./pom.xml",
+                PKFactory.Side.ORIGIN);
+        assertNotNull(scb);
+        File file = new File(PKFactory.Side.ORIGIN + DataUtility.SCB_FILE_NAME);
+        assertTrue(file.exists());
+
+        DataUtility.deleteGeneratedSCB();
+        assertFalse(file.exists());
+    }
+
+    @Test
+    public void generateSCBTarget() throws IOException {
+        File scb = DataUtility.generateSCB("localhost", "9042", "trust123", "./pom.xml", "key123", "./pom.xml",
+                PKFactory.Side.TARGET);
+        assertNotNull(scb);
+        File file = new File(PKFactory.Side.TARGET + DataUtility.SCB_FILE_NAME);
+        assertTrue(file.exists());
+
+        DataUtility.deleteGeneratedSCB();
+        assertFalse(file.exists());
+    }
 }
diff --git a/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java b/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java
index f987d947..cf80aa62 100644
--- a/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java
+++ b/src/test/java/com/datastax/cdm/feature/ConstantColumnsTest.java
@@ -15,10 +15,12 @@
  */
 package com.datastax.cdm.feature;
 
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.ArgumentMatchers.any;
+import static org.junit.jupiter.api.Assertions.assertAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import java.util.ArrayList;
diff --git a/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java b/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java
index 0cec5f6a..6267c236 100644
--- a/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java
+++ b/src/test/java/com/datastax/cdm/feature/ExtractJsonTest.java
@@ -15,10 +15,13 @@
  */
 package com.datastax.cdm.feature;
 
-import static org.apache.hadoop.shaded.com.google.common.base.CharMatcher.any;
-import static org.junit.jupiter.api.Assertions.*;
-import static org.mockito.ArgumentMatchers.anyInt;
-import static org.mockito.Mockito.*;
+import static org.junit.jupiter.api.Assertions.assertAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.when;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -29,14 +32,12 @@
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;
 
-import com.datastax.cdm.data.CqlConversion;
 import com.datastax.cdm.data.CqlData;
 import com.datastax.cdm.properties.IPropertyHelper;
 import com.datastax.cdm.properties.KnownProperties;
 import com.datastax.cdm.schema.CqlTable;
 import com.datastax.oss.driver.api.core.type.DataType;
 import com.datastax.oss.driver.api.core.type.DataTypes;
-import com.datastax.oss.driver.api.core.type.codec.registry.MutableCodecRegistry;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonMappingException;
diff --git a/src/test/java/com/datastax/cdm/job/ConnectionFetcherTest.java b/src/test/java/com/datastax/cdm/job/ConnectionFetcherTest.java
new file mode 100644
index 00000000..6ff4cb5f
--- /dev/null
+++ b/src/test/java/com/datastax/cdm/job/ConnectionFetcherTest.java
@@ -0,0 +1,52 @@
+package com.datastax.cdm.job;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.when;
+
+import org.apache.spark.SparkContext;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import com.datastax.cdm.cql.CommonMocks;
+import com.datastax.cdm.data.PKFactory;
+import com.datastax.cdm.properties.IPropertyHelper;
+import com.datastax.cdm.properties.KnownProperties;
+
+public class ConnectionFetcherTest extends CommonMocks {
+
+    @Mock
+    IPropertyHelper propertyHelper;
+
+    @Mock
+    private SparkContext context;
+
+    private ConnectionFetcher cf;
+
+    @BeforeEach
+    public void setup() {
+        defaultClassVariables();
+        commonSetupWithoutDefaultClassVariables();
+        MockitoAnnotations.openMocks(this);
+
+        cf = new ConnectionFetcher(context, propertyHelper);
+    }
+
+    @Test
+    public void getConnectionDetailsOrigin() {
+        when(propertyHelper.getAsString(KnownProperties.CONNECT_ORIGIN_HOST)).thenReturn("origin_host");
+        when(propertyHelper.getAsString(KnownProperties.CONNECT_TARGET_HOST)).thenReturn("target_host");
+        ConnectionDetails cd = cf.getConnectionDetails(PKFactory.Side.ORIGIN);
+        assertEquals("origin_host", cd.host());
+    }
+
+    @Test
+    public void getConnectionDetailsTarget() {
+        when(propertyHelper.getAsString(KnownProperties.CONNECT_ORIGIN_HOST)).thenReturn("origin_host");
+        when(propertyHelper.getAsString(KnownProperties.CONNECT_TARGET_HOST)).thenReturn("target_host");
+        ConnectionDetails cd = cf.getConnectionDetails(PKFactory.Side.TARGET);
+        assertEquals("target_host", cd.host());
+    }
+
+}
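
The new `isAstra` branch in `ConnectionFetcher.getConnection` is only taken when a truststore path is configured, the `isAstra` flag is true, and no SCB path is set (the SCB branch is evaluated first). As a minimal sketch, an origin side connecting to Astra DB without an SCB might be configured as below; it uses only the `spark.cdm.connect.origin.tls.*` keys that appear in this diff, with placeholder paths and passwords, and assumes host, port and credentials are supplied through the existing `spark.cdm.connect.origin.*` settings documented in cdm-detailed.properties:

spark.cdm.connect.origin.tls.enabled              true
spark.cdm.connect.origin.tls.isAstra              true
spark.cdm.connect.origin.tls.trustStore.path      /path/to/trustStore.jks
spark.cdm.connect.origin.tls.trustStore.password  <truststore-password>
spark.cdm.connect.origin.tls.keyStore.path        /path/to/identity.jks
spark.cdm.connect.origin.tls.keyStore.password    <keystore-password>
spark.cdm.connect.origin.tls.enabledAlgorithms    TLS_RSA_WITH_AES_128_CBC_SHA,TLS_RSA_WITH_AES_256_CBC_SHA

The same keys exist under `spark.cdm.connect.target.tls.*` for the target side.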
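
Under the hood, `DataUtility.generateSCB` writes a `config.json` holding the host, port, keystore and truststore settings, zips it together with the two keystore files (renamed to `identity.jks` and `trustStore.jks`) into `<side>_temp_cdm_scb_do_not_touch.zip` in the working directory, and `ConnectionFetcher` then points `spark.cassandra.connection.config.cloud.path` at that temporary bundle. The sketch below, loosely modelled on the new `DataUtilityTest` cases, builds a bundle and lists its entries; the class name, host, port and keystore paths are illustrative only, and real `.jks` files would be used in practice:

// Sketch only: assumes the CDM classes are on the classpath and that the two
// .jks paths point at real keystore files (the new DataUtilityTest uses ./pom.xml as a stand-in).
import java.io.File;
import java.io.IOException;
import java.util.Enumeration;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

import com.datastax.cdm.data.DataUtility;
import com.datastax.cdm.data.PKFactory;

public class GeneratedScbInspector {
    public static void main(String[] args) throws IOException {
        // Builds ORIGIN_temp_cdm_scb_do_not_touch.zip in the working directory,
        // the same call ConnectionFetcher makes when tls.isAstra is true and no SCB path is configured.
        File scb = DataUtility.generateSCB("db.example.com", "9042",
                "truststore-password", "./trustStore.jks",
                "keystore-password", "./identity.jks",
                PKFactory.Side.ORIGIN);

        // The bundle contains config.json, identity.jks and trustStore.jks.
        try (ZipFile zip = new ZipFile(scb)) {
            Enumeration<? extends ZipEntry> entries = zip.entries();
            while (entries.hasMoreElements()) {
                System.out.println(entries.nextElement().getName());
            }
        }

        // Removes the generated ORIGIN and TARGET bundles, if present.
        DataUtility.deleteGeneratedSCB();
    }
}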