diff --git a/build.sbt b/build.sbt
index 2e96ab2cc..233501797 100644
--- a/build.sbt
+++ b/build.sbt
@@ -2,7 +2,7 @@ name := "overwatch"
 
 organization := "com.databricks.labs"
 
-version := "0.5.0.3"
+version := "0.5.0.4"
 
 scalaVersion := "2.12.12"
 scalacOptions ++= Seq("-Xmax-classfile-name", "78")
diff --git a/src/main/scala/com/databricks/labs/overwatch/ApiCall.scala b/src/main/scala/com/databricks/labs/overwatch/ApiCall.scala
index 8976641ac..831fbd952 100644
--- a/src/main/scala/com/databricks/labs/overwatch/ApiCall.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/ApiCall.scala
@@ -1,6 +1,6 @@
 package com.databricks.labs.overwatch
 
-import com.databricks.labs.overwatch.utils.{ApiCallFailure, ApiEnv, Config, JsonUtils, NoNewDataException, SchemaTools, SparkSessionWrapper, TokenError}
+import com.databricks.labs.overwatch.utils._
 import com.fasterxml.jackson.databind.JsonMappingException
 import org.apache.log4j.{Level, Logger}
 import org.apache.spark.sql.DataFrame
diff --git a/src/main/scala/com/databricks/labs/overwatch/ParamDeserializer.scala b/src/main/scala/com/databricks/labs/overwatch/ParamDeserializer.scala
index ab48aa703..1e0d3c702 100644
--- a/src/main/scala/com/databricks/labs/overwatch/ParamDeserializer.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/ParamDeserializer.scala
@@ -7,9 +7,8 @@
 import com.fasterxml.jackson.databind.node.ArrayNode
 import com.fasterxml.jackson.databind.{DeserializationContext, JsonNode}
 import java.io.IOException
-import scala.collection.mutable.ArrayBuffer
+import scala.collection.JavaConverters._
 import scala.util.{Failure, Success, Try}
-import collection.JavaConverters._
 
 /**
  * Custom deserializer to convert json string coming from jobs main class into validated, strongly typed object
diff --git a/src/main/scala/com/databricks/labs/overwatch/env/Workspace.scala b/src/main/scala/com/databricks/labs/overwatch/env/Workspace.scala
index cc087b2b9..387db143a 100644
--- a/src/main/scala/com/databricks/labs/overwatch/env/Workspace.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/env/Workspace.scala
@@ -2,7 +2,7 @@ package com.databricks.labs.overwatch.env
 
 import com.databricks.labs.overwatch.ApiCall
 import com.databricks.labs.overwatch.utils.{ApiEnv, Config, SparkSessionWrapper}
-import org.apache.log4j.{Level, Logger}
+import org.apache.log4j.Logger
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.functions._
 
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/Bronze.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/Bronze.scala
index 2b1e51514..3e3b158de 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/Bronze.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/Bronze.scala
@@ -1,7 +1,7 @@
 package com.databricks.labs.overwatch.pipeline
 
 import com.databricks.labs.overwatch.env.{Database, Workspace}
-import com.databricks.labs.overwatch.utils.{Config, Layer, OverwatchScope}
+import com.databricks.labs.overwatch.utils.{Config, OverwatchScope}
 import org.apache.log4j.{Level, Logger}
 
 
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/BronzeTransforms.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/BronzeTransforms.scala
index 7a32bc4eb..e448467cf 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/BronzeTransforms.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/BronzeTransforms.scala
@@ -1,6 +1,5 @@
 package com.databricks.labs.overwatch.pipeline
 
-import com.databricks.dbutils_v1.DBUtilsHolder.dbutils
 import com.databricks.labs.overwatch.ApiCall
 import com.databricks.labs.overwatch.env.Database
 import com.databricks.labs.overwatch.utils.{SparkSessionWrapper, _}
@@ -10,14 +9,12 @@ import com.fasterxml.jackson.module.scala.DefaultScalaModule
 import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
 import org.apache.log4j.{Level, Logger}
 import org.apache.spark.eventhubs.{ConnectionStringBuilder, EventHubsConf, EventPosition}
-import org.apache.spark.sql.catalyst.expressions.Slice
 import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types.{BooleanType, StringType, StructField, StructType}
 import org.apache.spark.sql.{Column, DataFrame}
 import org.apache.spark.util.SerializableConfiguration
 
-import java.io.FileNotFoundException
 import java.time.{Duration, LocalDateTime}
 import scala.collection.parallel.ForkJoinTaskSupport
 import scala.concurrent.forkjoin.ForkJoinPool
@@ -149,7 +146,7 @@
                                    runID: String
                                   ): DataFrame = {
 
-    val connectionString = ConnectionStringBuilder(ehConfig.connectionString)
+    val connectionString = ConnectionStringBuilder(PipelineFunctions.parseEHConnectionString(ehConfig.connectionString))
       .setEventHubName(ehConfig.eventHubName)
       .build
 
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/Gold.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/Gold.scala
index 4316dba37..7fbaeb7a8 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/Gold.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/Gold.scala
@@ -1,7 +1,7 @@
 package com.databricks.labs.overwatch.pipeline
 
 import com.databricks.labs.overwatch.env.{Database, Workspace}
-import com.databricks.labs.overwatch.utils.{Config, Layer, OverwatchScope}
+import com.databricks.labs.overwatch.utils.{Config, OverwatchScope}
 import org.apache.log4j.Logger
 
 class Gold(_workspace: Workspace, _database: Database, _config: Config)
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/GoldTransforms.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/GoldTransforms.scala
index 059a3d155..1391fe17a 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/GoldTransforms.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/GoldTransforms.scala
@@ -4,7 +4,6 @@ import com.databricks.labs.overwatch.pipeline.TransformFunctions._
 import com.databricks.labs.overwatch.utils.{BadConfigException, SparkSessionWrapper, TimeTypes}
 import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.functions._
-import org.apache.spark.sql.types.DateType
 import org.apache.spark.sql.{Column, DataFrame}
 
 trait GoldTransforms extends SparkSessionWrapper {
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/Module.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/Module.scala
index c1b693f87..efa84da97 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/Module.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/Module.scala
@@ -1,13 +1,10 @@
 package com.databricks.labs.overwatch.pipeline
 
-import com.databricks.labs.overwatch.pipeline.Pipeline.deriveLocalDate
 import com.databricks.labs.overwatch.pipeline.TransformFunctions._
 import com.databricks.labs.overwatch.utils._
 import org.apache.log4j.{Level, Logger}
 import org.apache.spark.sql.DataFrame
 
-import java.time.{Instant, LocalDate}
-
 class Module(
              val moduleId: Int,
              val moduleName: String,
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/Pipeline.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/Pipeline.scala
index 130fde857..e6a6e2683 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/Pipeline.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/Pipeline.scala
@@ -3,7 +3,6 @@ package com.databricks.labs.overwatch.pipeline
 import com.databricks.labs.overwatch.env.{Database, Workspace}
 import com.databricks.labs.overwatch.pipeline.Pipeline.{deriveLocalDate, systemZoneId, systemZoneOffset}
 import com.databricks.labs.overwatch.utils._
-import io.delta.tables.DeltaTable
 import org.apache.log4j.{Level, Logger}
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.expressions.Window
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctions.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctions.scala
index e9d5843dc..9552e6c3d 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctions.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctions.scala
@@ -1,12 +1,13 @@
 package com.databricks.labs.overwatch.pipeline
 
+import com.databricks.dbutils_v1.DBUtilsHolder.dbutils
 import com.databricks.labs.overwatch.utils.Frequency.Frequency
-import com.databricks.labs.overwatch.utils.{Config, Frequency, IncrementalFilter}
+import com.databricks.labs.overwatch.utils.{Config, IncrementalFilter}
 import org.apache.log4j.{Level, Logger}
-import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.{Column, DataFrame, Row, SparkSession}
+import org.apache.spark.sql.{Column, DataFrame, SparkSession}
+
 import java.net.URI
 
 object PipelineFunctions {
@@ -14,6 +15,24 @@
 
   private val uriSchemeRegex = "^([a-zA-Z][-.+a-zA-Z0-9]*):/.*".r
 
+  /**
+    * parses the value for the connection string from the scope/key defined if the pattern matches {{secrets/scope/key}}
+    * otherwise return the true string value
+    * https://docs.databricks.com/security/secrets/secrets.html#store-the-path-to-a-secret-in-a-spark-configuration-property
+    * @param connectionString
+    * @return
+    */
+  def parseEHConnectionString(connectionString: String): String = {
+    val secretsRE = "\\{\\{secrets/([^/]+)/([^}]+)\\}\\}".r
+
+    secretsRE.findFirstMatchIn(connectionString) match {
+      case Some(i) =>
+        dbutils.secrets.get(i.group(1), i.group(2))
+      case None =>
+        connectionString
+    }
+  }
+
   /**
     * Ensure no duplicate slashes in path and default to dbfs:/ URI prefix where no uri specified to result in
     * fully qualified URI for db location
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineTable.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineTable.scala
index df0f22f28..17ca93c76 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineTable.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/PipelineTable.scala
@@ -5,10 +5,10 @@ import com.databricks.labs.overwatch.utils.Frequency.Frequency
 import com.databricks.labs.overwatch.utils._
 import org.apache.log4j.{Level, Logger}
 import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.{AnalysisException, DataFrame}
-import org.apache.spark.sql.catalyst.catalog.CatalogTable
 
 // TODO -- Add rules: Array[Rule] to enable Rules engine calculations in the append
 // also add ruleStrategy: Enum(Kill, Quarantine, Ignore) to determine when to require them
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/Silver.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/Silver.scala
index 756d66f6b..87ff3b610 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/Silver.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/Silver.scala
@@ -1,7 +1,7 @@
 package com.databricks.labs.overwatch.pipeline
 
 import com.databricks.labs.overwatch.env.{Database, Workspace}
-import com.databricks.labs.overwatch.utils.{Config, Layer, OverwatchScope}
+import com.databricks.labs.overwatch.utils.{Config, OverwatchScope}
 import org.apache.log4j.Logger
 
 class Silver(_workspace: Workspace, _database: Database, _config: Config)
diff --git a/src/main/scala/com/databricks/labs/overwatch/pipeline/TransformFunctions.scala b/src/main/scala/com/databricks/labs/overwatch/pipeline/TransformFunctions.scala
index fe301ed01..17d9eb674 100644
--- a/src/main/scala/com/databricks/labs/overwatch/pipeline/TransformFunctions.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/pipeline/TransformFunctions.scala
@@ -6,7 +6,7 @@ import org.apache.spark.sql.catalyst.plans.logical.SubqueryAlias
 import org.apache.spark.sql.expressions.{Window, WindowSpec}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.{AnalysisException, Column, DataFrame, Dataset, SparkSession}
+import org.apache.spark.sql.{AnalysisException, Column, DataFrame, Dataset}
 
 import java.time.LocalDate
 
diff --git a/src/main/scala/com/databricks/labs/overwatch/utils/Structures.scala b/src/main/scala/com/databricks/labs/overwatch/utils/Structures.scala
index 91ddcbd83..dc5b41f95 100644
--- a/src/main/scala/com/databricks/labs/overwatch/utils/Structures.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/utils/Structures.scala
@@ -3,16 +3,16 @@
 import com.databricks.labs.overwatch.pipeline.{Module, PipelineTable}
 import com.databricks.labs.overwatch.utils.Frequency.Frequency
 import com.databricks.labs.overwatch.utils.OverwatchScope.OverwatchScope
+import com.databricks.labs.overwatch.validation.SnapReport
 import org.apache.log4j.Level
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.types.{StructField, StructType}
+
+import java.sql.Timestamp
 import java.text.SimpleDateFormat
 import java.time.{LocalDateTime, ZonedDateTime}
 import java.util.Date
-import java.sql.Timestamp
-
-import com.databricks.labs.overwatch.validation.SnapReport
 
 case class DBDetail()
 
@@ -69,6 +69,7 @@
                           auditLogFormat: String = "json",
                           azureAuditLogEventhubConfig: Option[AzureAuditLogEventhubConfig] = None
                          )
+
 case class IntelligentScaling(enabled: Boolean = false, minimumCores: Int = 4, maximumCores: Int = 512, coeff: Double = 1.0)
 
 case class OverwatchParams(auditLogConfig: AuditLogConfig,
@@ -144,7 +145,9 @@
                                      )
 
 case class IncrementalFilter(cronField: StructField, low: Column, high: Column)
+
 case class UpgradeReport(db: String, tbl: String, errorMsg: Option[String])
+
 object OverwatchScope extends Enumeration {
   type OverwatchScope = Value
   val jobs, clusters, clusterEvents, sparkEvents, audit, notebooks, accounts, pools = Value
diff --git a/src/main/scala/com/databricks/labs/overwatch/utils/Upgrade.scala b/src/main/scala/com/databricks/labs/overwatch/utils/Upgrade.scala
index ca7c7ece7..44232ce85 100644
--- a/src/main/scala/com/databricks/labs/overwatch/utils/Upgrade.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/utils/Upgrade.scala
@@ -1,17 +1,13 @@
 package com.databricks.labs.overwatch.utils
 
 import com.databricks.labs.overwatch.env.Workspace
-import com.databricks.labs.overwatch.pipeline.{Bronze, Gold, Pipeline, PipelineTable}
+import com.databricks.labs.overwatch.pipeline.{Bronze, Gold}
 import org.apache.log4j.{Level, Logger}
-import org.apache.spark.sql.{Column, DataFrame, Dataset}
-import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.CatalogTable
-import org.apache.spark.sql.functions._
 import org.apache.spark.sql.expressions.Window
+import org.apache.spark.sql.functions._
+import org.apache.spark.sql.{Column, Dataset}
 
 import scala.collection.mutable.ArrayBuffer
-import scala.collection.parallel.ForkJoinTaskSupport
-import scala.concurrent.forkjoin.ForkJoinPool
 
 
 object Upgrade extends SparkSessionWrapper {
diff --git a/src/main/scala/com/databricks/labs/overwatch/validation/Kitana.scala b/src/main/scala/com/databricks/labs/overwatch/validation/Kitana.scala
index 09980517c..13aa0c2a6 100644
--- a/src/main/scala/com/databricks/labs/overwatch/validation/Kitana.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/validation/Kitana.scala
@@ -1,21 +1,13 @@
 package com.databricks.labs.overwatch.validation
 
-import com.databricks.dbutils_v1.DBUtilsHolder.dbutils
-import com.databricks.labs.overwatch.env.{Database, Workspace}
-import com.databricks.labs.overwatch.pipeline.{Bronze, Gold, Initializer, Module, Pipeline, PipelineTable, Silver}
+import com.databricks.labs.overwatch.env.Workspace
+import com.databricks.labs.overwatch.pipeline.TransformFunctions._
+import com.databricks.labs.overwatch.pipeline.{Initializer, Module, Pipeline, PipelineTable}
 import com.databricks.labs.overwatch.utils.JsonUtils.objToJson
 import com.databricks.labs.overwatch.utils._
-import com.databricks.labs.overwatch.pipeline.TransformFunctions._
-import org.apache.spark.sql.{Column, DataFrame, Dataset}
-import org.apache.spark.sql.functions._
 import org.apache.log4j.{Level, Logger}
-import org.apache.spark.sql.expressions.Window
-
-import java.sql.Timestamp
-import java.text.SimpleDateFormat
-import java.time.{Duration, LocalDate}
-import scala.collection.parallel.ForkJoinTaskSupport
-import java.util.concurrent.ForkJoinPool
+import org.apache.spark.sql.functions._
+import org.apache.spark.sql.{DataFrame, Dataset}
 
 case class SnapReport(tableFullName: String,
                       from: java.sql.Timestamp,
diff --git a/src/main/scala/com/databricks/labs/overwatch/validation/Scenarios.scala b/src/main/scala/com/databricks/labs/overwatch/validation/Scenarios.scala
index f0ec2df52..37d772c0e 100644
--- a/src/main/scala/com/databricks/labs/overwatch/validation/Scenarios.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/validation/Scenarios.scala
@@ -1,12 +1,12 @@
 package com.databricks.labs.overwatch.validation
 
+import com.databricks.labs.overwatch.pipeline.TransformFunctions._
 import com.databricks.labs.overwatch.utils.SparkSessionWrapper
-import com.databricks.labs.validation.utils.Structures._
 import com.databricks.labs.validation._
-import org.apache.spark.sql.{Column, DataFrame}
-import org.apache.spark.sql.functions._
+import com.databricks.labs.validation.utils.Structures._
 import org.apache.spark.sql.expressions.Window
-import com.databricks.labs.overwatch.pipeline.TransformFunctions._
+import org.apache.spark.sql.functions._
+import org.apache.spark.sql.{Column, DataFrame}
 
 
 object Scenarios extends SparkSessionWrapper {
diff --git a/src/main/scala/com/databricks/labs/overwatch/validation/ValidationUtils.scala b/src/main/scala/com/databricks/labs/overwatch/validation/ValidationUtils.scala
index c4b4c2b23..9739db1ba 100644
--- a/src/main/scala/com/databricks/labs/overwatch/validation/ValidationUtils.scala
+++ b/src/main/scala/com/databricks/labs/overwatch/validation/ValidationUtils.scala
@@ -1,24 +1,21 @@
 package com.databricks.labs.overwatch.validation
 
-import com.databricks.dbutils_v1.DBUtilsHolder.dbutils
 import com.databricks.labs.overwatch.env.Workspace
-import com.databricks.labs.overwatch.pipeline._
 import com.databricks.labs.overwatch.pipeline.TransformFunctions._
+import com.databricks.labs.overwatch.pipeline._
 import com.databricks.labs.overwatch.utils._
-import io.delta.tables.{DeltaMergeBuilder, DeltaTable}
-import org.apache.spark.sql.{AnalysisException, Column, DataFrame, Dataset}
-import org.apache.spark.sql.functions._
-import org.apache.spark.sql.types._
+import io.delta.tables.DeltaTable
 import org.apache.log4j.{Level, Logger}
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.Dataset
 import org.apache.spark.sql.expressions.Window
+import org.apache.spark.sql.functions._
 
 import java.sql.Timestamp
 import java.text.SimpleDateFormat
-import java.time.{Duration, LocalDate}
+import java.time.Duration
 import java.util.concurrent.ForkJoinPool
-import scala.collection.parallel.{ForkJoinTaskSupport, ParSeq}
 import scala.collection.parallel.mutable.ParArray
+import scala.collection.parallel.{ForkJoinTaskSupport, ParSeq}
 
 class ValidationUtils(sourceDBName: String, snapWorkspace: Workspace, _paralellism: Option[Int])
   extends SparkSessionWrapper {
diff --git a/src/test/scala/com/databricks/labs/overwatch/ParamDeserializerTest.scala b/src/test/scala/com/databricks/labs/overwatch/ParamDeserializerTest.scala
index 7d6b2da2c..f46077e41 100644
--- a/src/test/scala/com/databricks/labs/overwatch/ParamDeserializerTest.scala
+++ b/src/test/scala/com/databricks/labs/overwatch/ParamDeserializerTest.scala
@@ -1,6 +1,6 @@
 package com.databricks.labs.overwatch
 
-import com.databricks.labs.overwatch.utils.{AuditLogConfig, AzureAuditLogEventhubConfig, DatabricksContractPrices, IntelligentScaling, OverwatchParams}
+import com.databricks.labs.overwatch.utils._
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.fasterxml.jackson.databind.module.SimpleModule
 import com.fasterxml.jackson.module.scala.DefaultScalaModule
diff --git a/src/test/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctionsTest.scala b/src/test/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctionsTest.scala
index 38f6b5e85..0e66f85f6 100644
--- a/src/test/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctionsTest.scala
+++ b/src/test/scala/com/databricks/labs/overwatch/pipeline/PipelineFunctionsTest.scala
@@ -1,16 +1,13 @@
 package com.databricks.labs.overwatch.pipeline
 
 import com.databricks.labs.overwatch.SparkSessionTestWrapper
-import com.databricks.labs.overwatch.utils.Frequency.Frequency
-import com.databricks.labs.overwatch.utils.{Frequency, IncrementalFilter}
 import com.github.mrpowers.spark.fast.tests.DataFrameComparer
-import org.apache.spark.sql.functions.{col, lit}
+import org.apache.spark.sql.{Column, SQLContext}
+import org.apache.spark.sql.execution.streaming.MemoryStream
+import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.internal.StaticSQLConf
 import org.apache.spark.sql.types._
 import org.scalatest.funspec.AnyFunSpec
-import org.apache.spark.sql.execution.streaming.MemoryStream
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.Column
 
 
 class PipelineFunctionsTest extends AnyFunSpec with DataFrameComparer with SparkSessionTestWrapper {
diff --git a/src/test/scala/com/databricks/labs/overwatch/pipeline/TransformFunctionsTest.scala b/src/test/scala/com/databricks/labs/overwatch/pipeline/TransformFunctionsTest.scala
index 8aa3f4e10..8b7b8d829 100644
--- a/src/test/scala/com/databricks/labs/overwatch/pipeline/TransformFunctionsTest.scala
+++ b/src/test/scala/com/databricks/labs/overwatch/pipeline/TransformFunctionsTest.scala
@@ -1,15 +1,15 @@
 package com.databricks.labs.overwatch.pipeline
 
-import java.sql.{Date, Timestamp}
-import java.time.Instant
-
 import com.databricks.labs.overwatch.SparkSessionTestWrapper
 import com.github.mrpowers.spark.fast.tests.DataFrameComparer
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.functions.{col, lit, struct}
+import org.apache.spark.sql.functions.{col, lit}
 import org.apache.spark.sql.types._
 import org.scalatest.funspec.AnyFunSpec
 
+import java.sql.{Date, Timestamp}
+import java.time.Instant
+
 class TransformFunctionsTest extends AnyFunSpec with DataFrameComparer with SparkSessionTestWrapper {
   import spark.implicits._
   spark.conf.set("spark.sql.session.timeZone", "UTC")
diff --git a/src/test/scala/com/databricks/labs/overwatch/utils/ConfigTest.scala b/src/test/scala/com/databricks/labs/overwatch/utils/ConfigTest.scala
index e84b2edde..b4c242c3b 100644
--- a/src/test/scala/com/databricks/labs/overwatch/utils/ConfigTest.scala
+++ b/src/test/scala/com/databricks/labs/overwatch/utils/ConfigTest.scala
@@ -1,7 +1,6 @@
 package com.databricks.labs.overwatch.utils
 
 import org.scalatest.funspec.AnyFunSpec
-import org.scalatest.PrivateMethodTester._
 
 class ConfigTest extends AnyFunSpec {
 
diff --git a/src/test/scala/com/databricks/labs/overwatch/utils/SchemaToolsTest.scala b/src/test/scala/com/databricks/labs/overwatch/utils/SchemaToolsTest.scala
index 6c009689d..50066090a 100644
--- a/src/test/scala/com/databricks/labs/overwatch/utils/SchemaToolsTest.scala
+++ b/src/test/scala/com/databricks/labs/overwatch/utils/SchemaToolsTest.scala
@@ -1,7 +1,6 @@
 package com.databricks.labs.overwatch.utils
 
 import com.databricks.labs.overwatch.SparkSessionTestWrapper
-import org.apache.spark.sql.functions.col
 import org.scalatest.funspec.AnyFunSpec
 
 class SchemaToolsTest extends AnyFunSpec with SparkSessionTestWrapper {
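Beyond the import cleanup and the 0.5.0.4 version bump, the functional change in this patch is the new `PipelineFunctions.parseEHConnectionString` helper, which `BronzeTransforms` now calls before building the Azure Event Hubs connection string so the config value may be either a literal string or a `{{secrets/scope/key}}` reference. The sketch below is a hypothetical, standalone rendering of that same pattern: the regex is taken from the diff, but the object name, the injectable `secretLookup` function, and the example scope/key values are invented so the snippet can run outside a Databricks runtime (where `dbutils.secrets.get` would do the lookup).

```scala
// Hypothetical standalone illustration of the secret-reference resolution added in
// PipelineFunctions.parseEHConnectionString. Not the shipped code: dbutils.secrets.get
// is replaced by an injectable lookup so this compiles and runs anywhere.
object EHConnectionStringExample extends App {

  // Same pattern as in the diff: matches {{secrets/<scope>/<key>}}
  private val secretsRE = "\\{\\{secrets/([^/]+)/([^}]+)\\}\\}".r

  def resolve(connectionString: String, secretLookup: (String, String) => String): String =
    secretsRE.findFirstMatchIn(connectionString) match {
      case Some(m) => secretLookup(m.group(1), m.group(2)) // on Databricks: dbutils.secrets.get(scope, key)
      case None    => connectionString                     // literal value passes through unchanged
    }

  // Stand-in for the workspace secret store; scope/key names are made up for the example
  private val fakeSecrets: (String, String) => String =
    (scope, key) => s"<secret value of $scope/$key>"

  // Secret reference: resolved through the lookup
  println(resolve("{{secrets/overwatch/eh-conn-string}}", fakeSecrets))
  // Plain connection string: returned as-is
  println(resolve("Endpoint=sb://example.servicebus.windows.net/;EntityPath=audit-logs", fakeSecrets))
}
```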