diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/arrayTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/arrayTypes.conf index 61bde4a403..eb36a2eed3 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/arrayTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/arrayTypes.conf @@ -4,7 +4,7 @@ as well as use the dataFrame API"; INCLUDE $JTESTS/io/snappydata/hydra/northwind/startDualModeCluster.conf; -TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.ArrayType io.snappydata.hydra.cluster.SnappyPrms-userAppName = "ComplexType_ArrayType_Validation" io.snappydata.hydra.cluster.SnappyPrms-appPropsForJobServer ="dataFilesLocation=${dataFilesLocation}" @@ -12,7 +12,7 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask threadGroups = snappyStoreThreads maxTimesToRun = 1; -TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.ArrayTypeAPI io.snappydata.hydra.cluster.SnappyPrms-userAppName = "ComplexType_ArrayType_Validation_Via_API" io.snappydata.hydra.cluster.SnappyPrms-appPropsForJobServer ="dataFilesLocation=${dataFilesLocation}" @@ -20,6 +20,13 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob + io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.ArrayTypeNULLValue + 
io.snappydata.hydra.cluster.SnappyPrms-userAppName = "ComplexType_ArrayTypeNULLValue_Validation" + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/mapTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/mapTypes.conf index d54b6664e7..56ab9e0d32 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/mapTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/mapTypes.conf @@ -20,6 +20,13 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob + io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.MapTypeNULLValue + io.snappydata.hydra.cluster.SnappyPrms-userAppName = "ComplexType_MapTypeNULLValue_Validation" + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorArrayTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorArrayTypes.conf index 00a3bb9756..a06a12c4ed 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorArrayTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorArrayTypes.conf @@ -19,6 +19,12 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = 
HydraTask threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob + io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorArrayTypeNULL + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf index 07a66990c0..721af9a5e7 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf @@ -4,7 +4,6 @@ query as well as dataFrame API"; INCLUDE $JTESTS/io/snappydata/hydra/northwind/startDualModeCluster.conf; - TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorMapType io.snappydata.hydra.cluster.SnappyPrms-userAppArgs = "${dataFilesLocation}" @@ -19,6 +18,12 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_e threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob + io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorMapTypeNULL + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE 
$JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorStructTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorStructTypes.conf index 775174a3cc..f88ad097ba 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorStructTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorStructTypes.conf @@ -18,6 +18,12 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_e threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob + io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorStructTypeNULL + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/structTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/structTypes.conf index 4bbd096cad..39d65814e4 100644 --- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/structTypes.conf +++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/structTypes.conf @@ -4,7 +4,6 @@ query as well as dataFrame API"; INCLUDE $JTESTS/io/snappydata/hydra/northwind/startDualModeCluster.conf; - TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.StructType io.snappydata.hydra.cluster.SnappyPrms-userAppName = 
"ComplexType_StructType_Validation" @@ -21,6 +20,13 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask threadGroups = snappyStoreThreads maxTimesToRun = 1; +TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSnappyJob + io.snappydata.hydra.cluster.SnappyPrms-jobClassNames = io.snappydata.hydra.complexdatatypes.StructTypeNULLValue + io.snappydata.hydra.cluster.SnappyPrms-userAppName = "ComplexType_StructTypeNULLValue_Validation" + io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar + threadGroups = snappyStoreThreads + maxTimesToRun = 1; + INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf; hydra.Prms-maxResultWaitSec = 7200; hydra.Prms-totalTaskTimeSec = 7200; diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/AllMixedTypes.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/AllMixedTypes.scala index 17d0dd91b9..051a8ee267 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/AllMixedTypes.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/AllMixedTypes.scala @@ -37,6 +37,7 @@ class AllMixedTypes extends SnappySQLJob{ val sc : SparkContext = SparkContext.getOrCreate() val sqlContext : SQLContext = SQLContext.getOrCreate(sc) val printContent : Boolean = false + val isExecute : Boolean = false def getCurrentDirectory : String = new File(".").getCanonicalPath val outputFile : String = "ValidateAllMixedTypes" + "_" + "column" + @@ -53,11 +54,17 @@ class AllMixedTypes extends SnappySQLJob{ snc.sql("CREATE TABLE IF NOT EXISTS TwentyTwenty USING COLUMN " + "AS (SELECT * FROM TempTwenty)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Mixed_Q1) snc.sql(ComplexTypeUtils.Mixed_Q2) snc.sql(ComplexTypeUtils.Mixed_Q3) snc.sql(ComplexTypeUtils.Mixed_Q4) snc.sql(ComplexTypeUtils.Mixed_Q5) + } if(printContent) { println("snc : Mixed_Q1 " + (snc.sql(ComplexTypeUtils.Mixed_Q1).show)) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayType.scala index cf6ebf813d..72c3fa44be 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayType.scala @@ -44,6 +44,7 @@ class ArrayType extends SnappySQLJob { val sc = SparkContext.getOrCreate() val sqlContext = SQLContext.getOrCreate(sc) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS Student") @@ -54,12 +55,18 @@ class ArrayType extends SnappySQLJob { "OPTIONS(path '" + dataLocation + "')") snc.sql("CREATE TABLE Student USING COLUMN AS (SELECT * FROM TempArray)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. + */ + if(isExecute) { snc.sql(ComplexTypeUtils.Array_Q1) snc.sql(ComplexTypeUtils.Array_Q2) snc.sql(ComplexTypeUtils.Array_Q3) snc.sql(ComplexTypeUtils.Array_View) snc.sql(ComplexTypeUtils.Array_Q4) snc.sql(ComplexTypeUtils.Array_Q5) + } if(printContent) { println(snc.sql(ComplexTypeUtils.Array_Q1).show()) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayTypeNULLValue.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayTypeNULLValue.scala new file mode 100644 index 0000000000..02d670a50d --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArrayTypeNULLValue.scala @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. + */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} + +import com.typesafe.config.Config +import io.snappydata.hydra.SnappyTestUtils +import org.apache.spark.sql._ + +class ArrayTypeNULLValue extends SnappySQLJob { + override def isValidJob(sc: SnappySession, config: Config): SnappyJobValidation = SnappyJobValid() + + override def runSnappyJob(snappySession: SnappySession, jobConfig: Config): Any = { + // scalastyle:off println + println("Validation for NULL Value in ArraysType column Job started...") + val snc : SnappyContext = snappySession.sqlContext + val spark : SparkSession = SparkSession.builder().enableHiveSupport().getOrCreate() + val sqlContext = spark.sqlContext + val outputFile = "ValidateArrayTypeNULLValue" + "_" + + System.currentTimeMillis() + jobConfig.getString("logFileName") + val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) + + /** + * Test : NULL value in Array Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + /** + * Test Case 1 : ArrayType Column is last column in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableLastColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNullInLastColumn) + spark.sql(ComplexTypeUtils.insertNullInLastColumn) + snc.sql(ComplexTypeUtils.insertNormalDataLastColumn) + spark.sql(ComplexTypeUtils.insertNormalDataLastColumn) + /** + * Test Case 2 : ArrayType Column is between (say middle) the other data types in the table. + */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrayTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNullInMiddleColumn) + spark.sql(ComplexTypeUtils.insertNullInMiddleColumn) + snc.sql(ComplexTypeUtils.insertNormalDataMiddleColumn) + spark.sql(ComplexTypeUtils.insertNormalDataMiddleColumn) + /** + * Test Case 3: ArrayType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrayTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNullInFirstColumn) + spark.sql(ComplexTypeUtils.insertNullInFirstColumn) + snc.sql(ComplexTypeUtils.insertNormalDataFirstColumn) + spark.sql(ComplexTypeUtils.insertNormalDataFirstColumn) + /** + * Validation Routine + */ + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectLastColumn, + "AQ1", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType last column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectMiddleColumn, + "AQ2", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType middle column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectFirstColumn, + "AQ3", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType first column OK --") + pw.flush() + pw.close() + + snc.sql(ComplexTypeUtils.dropTableStudentLast) + 
snc.sql(ComplexTypeUtils.dropTableStudentMiddle) + snc.sql(ComplexTypeUtils.dropTableStudentFirst) + spark.sql(ComplexTypeUtils.dropTableStudentLast) + spark.sql(ComplexTypeUtils.dropTableStudentMiddle) + spark.sql(ComplexTypeUtils.dropTableStudentFirst) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + } +} diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStringInMapAsValue.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStringInMapAsValue.scala index cac94aa74c..4a58314db5 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStringInMapAsValue.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStringInMapAsValue.scala @@ -43,6 +43,7 @@ class ArraysOfStringInMapAsValue extends SnappySQLJob{ val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) val dataLocation = jobConfig.getString("dataFilesLocation") val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ @@ -56,10 +57,16 @@ class ArraysOfStringInMapAsValue extends SnappySQLJob{ val fp = snc.sql( "CREATE TABLE IF NOT EXISTS FamousPeople " + "USING COLUMN AS (SELECT * FROM TempFamousPeople)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference.
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Array_Map_TempView) snc.sql(ComplexTypeUtils.Array_Map_Q1) snc.sql(ComplexTypeUtils.Array_Map_Q2) snc.sql(ComplexTypeUtils.Array_Map_Q3) + } if(printContent) { println("snc : Array_Map_Q1 " + snc.sql(ComplexTypeUtils.Array_Map_Q1).show) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStructType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStructType.scala index 67c2dd58bb..30308de3f5 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStructType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ArraysOfStructType.scala @@ -41,6 +41,9 @@ class ArraysOfStructType extends SnappySQLJob{ val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) val dataLocation = jobConfig.getString("dataFilesLocation") val printContent : Boolean = false + // Keep the isExecute value true because in validation + // routine there is a Hydra Exception which needs to be fixed. 
+ val isExecute : Boolean = true /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS TwoWheeler") @@ -51,11 +54,13 @@ class ArraysOfStructType extends SnappySQLJob{ snc.sql("CREATE TABLE IF NOT EXISTS TwoWheeler USING COLUMN " + "AS (SELECT * FROM TempBike)") + if(isExecute) { snc.sql(ComplexTypeUtils.ArraysOfStruct_Q1) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q2) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q3) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q4) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q5) + } if(printContent) { println("snc : ArraysOfStruct_Q1 " + (snc.sql(ComplexTypeUtils.ArraysOfStruct_Q1).show())) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ComplexTypeUtils.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ComplexTypeUtils.scala index aed453beed..262b3e731d 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ComplexTypeUtils.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/ComplexTypeUtils.scala @@ -47,6 +47,37 @@ object ComplexTypeUtils { val Array_Q5 : String = "SELECT name,MAX(Marks),MIN(Marks) FROM StudentMark GROUP BY name" + /* ----- Array Type NULL Value Queries ----- */ + val createSchemaST = "CREATE SCHEMA ST" + val dropDatabaseST = "DROP DATABASE ST" + val createTableLastColumnArrayType = "CREATE TABLE IF NOT EXISTS ST.StudentLast" + + "(rollno int,name String, adminDate Array) USING COLUMN" + val createTableMiddleColumnArrayType = "CREATE TABLE IF NOT EXISTS ST.StudentMiddle" + + "(rollno int,adminDate Array,time TimeStamp, class int) USING COLUMN" + val createTableFirstColumnArrayType = "CREATE TABLE IF NOT EXISTS ST.StudentFirst" + + "(Total Array,name String, rollno int) USING COLUMN" + val createTableInSparkArrTypeLastColumn = "CREATE TABLE IF NOT EXISTS ST.StudentLast" + + "(rollno int,name String, adminDate Array)" + val createTableInSparkArrayTypeMiddleColumn = "CREATE TABLE IF NOT EXISTS ST.StudentMiddle" + + "(rollno int,adminDate Array,time 
TimeStamp, class int)" + val createTableInSparkArrayTypeFirstColumn = "CREATE TABLE IF NOT EXISTS ST.StudentFirst" + + "(Total Array,name String, rollno int)" + + val insertNullInLastColumn = "INSERT INTO ST.StudentLast SELECT 1, 'ABC', null" + val insertNullInMiddleColumn = "INSERT INTO ST.StudentMiddle SELECT 1,null,null,6" + val insertNullInFirstColumn = "INSERT INTO ST.StudentFirst SELECT null,'BBB',20" + val insertNormalDataLastColumn = "INSERT INTO ST.StudentLast SELECT 2,'XYZ',Array('2020-01-21')" + val insertNormalDataMiddleColumn = "INSERT INTO ST.StudentMiddle SELECT " + + "1,Array('2020-01-21'), '2020-01-22 12:16:52.598', 5" + val insertNormalDataFirstColumn = "INSERT INTO ST.StudentFirst SELECT Array(25.6),'AAA',10" + + val selectLastColumn = "SELECT * FROM ST.StudentLast" + val selectMiddleColumn = "SELECT * FROM ST.StudentMiddle" + val selectFirstColumn = "SELECT * FROM ST.StudentFirst" + + val dropTableStudentLast = "DROP TABLE ST.StudentLast" + val dropTableStudentMiddle = "DROP TABLE ST.StudentMiddle" + val dropTableStudentFirst = "DROP TABLE ST.StudentFirst" /* ----- Map Type ----- */ /* ----- Snappy Map Type Queries ----- */ @@ -103,6 +134,37 @@ object ComplexTypeUtils { "SELECT name,History['history'] AS marks FROM StudentMarksRecord) " + "GROUP BY name" + /* ----- Map Type NULL Value Queries ----- */ + val createTableLastColumnMapType = "CREATE TABLE IF NOT EXISTS st.MapTypeLast" + + "(rollno int,name String, test Map) USING COLUMN" + val createTableMiddleColumnMapType = "CREATE TABLE IF NOT EXISTS st.MapTypeMiddle" + + "(rollno int, test Map, name String, class int) USING COLUMN" + val createTableFirstColumnMapType = "CREATE TABLE IF NOT EXISTS st.MapTypeFirst" + + "(Record Map, avg double, match int) USING COLUMN" + val createTableInSparkMapTypeLastColumn = "CREATE TABLE IF NOT EXISTS st.MapTypeLast" + + "(rollno int,name String, test Map)" + val createTableInSparkMapTypeMiddleColumn = "CREATE TABLE IF NOT EXISTS st.MapTypeMiddle" + + 
"(rollno int, test Map, name String, class int)" + val createTableInSparkMapTypeFirstColumn = "CREATE TABLE IF NOT EXISTS st.MapTypeFirst" + + "(Record Map, avg double, match int)" + + val insertNULLMapTypeLast = "INSERT INTO ST.MapTypeLast SELECT 1, 'XXD', null" + val insertNULLMapTypeMiddle = "INSERT INTO ST.MapTypeMiddle SELECT 1, null, 'ABB', 5" + val insertNULLMapTypeFirst = "INSERT INTO ST.MapTypeFirst SELECT null, 62.7d, 112" + val insertNormalDataMapTypeLast = "INSERT INTO ST.MapTypeLast SELECT 2, 'MNO', MAP('HJ','KL')" + val insertNormalDataMapTypeMiddle = "INSERT INTO ST.MapTypeMiddle " + + "SELECT 2, MAP(10,55.55d), 'TTT', 9" + val insertNormalDataMapTypeFirst = "INSERT INTO ST.MapTypeFirst " + + "SELECT MAP('Sachin', 'RightHand'), 54.6d, 400" + + val selectMapLast = "SELECT * FROM ST.MapTypeLast" + val selectMapMiddle = "SELECT * FROM ST.MapTypeMiddle" + val selectMapFirst = "SELECT * FROM ST.MapTypeFirst" + + val dropTableMapLast = "DROP TABLE ST.MapTypeLast" + val dropTableMapMiddle = "DROP TABLE ST.MapTypeMiddle" + val dropTableMapFirst = "DROP TABLE ST.MapTypeFirst" + /* ----- Struct Type ----- */ /* ----- Snappy Struct Type Queries ----- */ val Struct_Q1 : String = "SELECT name, TestRecord.Runs, TestRecord.Avg FROM CricketRecord " + @@ -114,6 +176,47 @@ object ComplexTypeUtils { "FROM CricketRecord " + "ORDER BY TestRecord.Matches DESC" + /* ----- Struct Type NULL Value Queries ----- */ + val createTableLastColumnStructType = "CREATE TABLE IF NOT EXISTS ST.CricketRecordLast(name String,allRounder boolean," + + "TestRecord STRUCT) USING COLUMN" + val createTableMiddleColumnStructType = "CREATE TABLE IF NOT EXISTS " + + "ST.CricketRecordMiddle(name String," + + "TestRecord STRUCT," + + "allRounder boolean) USING COLUMN" + val createTableFirstColumnStructType = "CREATE TABLE IF NOT EXISTS " + + "ST.CricketRecordFirst(TestRecord STRUCT," + + "name String,allRounder boolean) USING COLUMN" + val createTableInSparkStructTypeLastColumn = "CREATE TABLE IF 
NOT EXISTS " + + "st.CricketRecordLast(name String,allRounder boolean," + + "TestRecord STRUCT)" + val createTableInSparkStructTypeMiddleColumn = "CREATE TABLE IF NOT EXISTS " + + "ST.CricketRecordMiddle(name String," + + "TestRecord STRUCT," + + "allRounder boolean)" + val createTableInSparkStructTypeFirstColumn = "CREATE TABLE IF NOT EXISTS " + + "ST.CricketRecordFirst(TestRecord STRUCT," + + "name String,allRounder boolean)" + + val insertNULLStructTypeLast = "INSERT INTO ST.CricketRecordLast SELECT " + + "'Sachin Tendulkar', true, null" + val insertNULLStructTypeMiddle = "INSERT INTO ST.CricketRecordMiddle " + + "SELECT 'Rahul Drvaid',null,false" + val insertNULLStructTypeFirst = "INSERT INTO ST.CricketRecordFirst SELECT null, 'Kapil Dev', true" + val insertNormalDataStructTypeLast = "INSERT INTO ST.CricketRecordLast SELECT " + + "'Sachin Tendulkar', true, STRUCT('Right Hand',200,15921,53.79)" + val insertNormalDataStructTypeMiddle = "INSERT INTO ST.CricketRecordMiddle SELECT " + + "'Rahul Drvaid',STRUCT('Right Hand',164,13288,52.31),false" + val insertNormalDataStructTypeFirst = "INSERT INTO ST.CricketRecordFirst " + + "SELECT STRUCT('Right Hand',131,5248,31.05), 'Kapil Dev', true" + + val selectStructLast = "SELECT * FROM ST.CricketRecordLast" + val selectStructMiddle = "SELECT * FROM ST.CricketRecordMiddle" + val selectStructFirst = "SELECT * FROM ST.CricketRecordFirst" + + val dropTableStructLast = "DROP TABLE ST.CricketRecordLast" + val dropTableStructMiddle = "DROP TABLE ST.CricketRecordMiddle" + val dropTableStructFirst = "DROP TABLE ST.CricketRecordFirst" + /* ----- ArrayOfStruct Type ----- */ /* ----- Snappy ArrayOfStruct Type Queries ----- */ diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapType.scala index a7e0ebb83c..d70cc79d23 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapType.scala +++ 
b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapType.scala @@ -42,6 +42,7 @@ class MapType extends SnappySQLJob{ val dataLocation = jobConfig.getString("dataFilesLocation") val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) val printContent : Boolean = false + val isExecute : Boolean = false snc.sql("DROP TABLE IF EXISTS TempStRecord") snc.sql("DROP TABLE IF EXISTS StudentMarksRecord") @@ -53,12 +54,18 @@ class MapType extends SnappySQLJob{ snc.sql("CREATE TABLE IF NOT EXISTS StudentMarksRecord USING COLUMN " + "AS (SELECT * FROM TempStRecord)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. + */ + if(isExecute) { snc.sql(ComplexTypeUtils.Map_Q1) snc.sql(ComplexTypeUtils.Map_Q2) snc.sql(ComplexTypeUtils.Map_Q3) snc.sql(ComplexTypeUtils.Map_Q4) snc.sql(ComplexTypeUtils.Map_Q5) snc.sql(ComplexTypeUtils.Map_Q6) + } if(printContent) { println("snc Map_Q1:" + snc.sql(ComplexTypeUtils.Map_Q1).show) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapTypeNULLValue.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapTypeNULLValue.scala new file mode 100644 index 0000000000..a196d912ff --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/MapTypeNULLValue.scala @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. 
See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. + */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} + +import com.typesafe.config.Config +import org.apache.spark.sql._ + +class MapTypeNULLValue extends SnappySQLJob{ + override def isValidJob(sc: SnappySession, config: Config): SnappyJobValidation = SnappyJobValid() + + override def runSnappyJob(snappySession: SnappySession, jobConfig: Config): Any = { + // scalastyle:off println + println("Validation for NULL Value in Map Type column Job started...") + val snc : SnappyContext = snappySession.sqlContext + val spark : SparkSession = SparkSession.builder().enableHiveSupport().getOrCreate() + val sqlContext : SQLContext = spark.sqlContext + val outputFile = "ValidateMapTypeNULLValue" + "_" + + System.currentTimeMillis() + jobConfig.getString("logFileName") + val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) + + /** + * Test : NULL value in Map Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + + /** + * Test Case 1 : MapType Column is last column in the table. + */ + snc.sql(ComplexTypeUtils.createTableLastColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeLast) + spark.sql(ComplexTypeUtils.insertNULLMapTypeLast) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeLast) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeLast) + /** + * Test Case 2 : MapType Column is between (say middle) the other data types in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeMiddle) + spark.sql(ComplexTypeUtils.insertNULLMapTypeMiddle) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle) + /** + * Test Case 3: MapType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeFirst) + spark.sql(ComplexTypeUtils.insertNULLMapTypeFirst) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst) + /** + * Validation Routine + */ + val snappyDFLast = snc.sql(ComplexTypeUtils.selectMapLast) + val sparkDFLast = spark.sql(ComplexTypeUtils.selectMapLast) + val df1Last = snappyDFLast.collect() + val df2Last = sparkDFLast.collect() + var result1 = df1Last.mkString(",") + var result2 = df2Last.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1Last.sameElements(df2Last)) { + pw.println("-- Insertion of NULL value in MapType last column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType last column FAILED --") + } + pw.flush() + val snappyDFMiddle = snc.sql(ComplexTypeUtils.selectMapMiddle) + val sparkDFMiddle = spark.sql(ComplexTypeUtils.selectMapMiddle) + val df1Middle = snappyDFMiddle.collect() + val df2Middle = sparkDFMiddle.collect() + result1 = df1Middle.mkString(",") + result2 = df2Middle.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1Middle.sameElements(df2Middle)) { + pw.println("-- Insertion of NULL value in MapType Middle column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType Middle column FAILED --") + } + pw.flush() + val snappyDFFirst = snc.sql(ComplexTypeUtils.selectMapFirst) + val sparkDFFirst = 
spark.sql(ComplexTypeUtils.selectMapFirst) + val df1First = snappyDFFirst.collect() + val df2First = sparkDFFirst.collect() + result1 = df1First.mkString(",") + result2 = df2First.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1First.sameElements(df2First)) { + pw.println("-- Insertion of NULL value in MapType First column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType First column FAILED --") + } + pw.flush() + pw.close() + + snc.sql(ComplexTypeUtils.dropTableMapLast) + snc.sql(ComplexTypeUtils.dropTableMapMiddle) + snc.sql(ComplexTypeUtils.dropTableMapFirst) + spark.sql(ComplexTypeUtils.dropTableMapLast) + spark.sql(ComplexTypeUtils.dropTableMapMiddle) + spark.sql(ComplexTypeUtils.dropTableMapFirst) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + } +} diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorAllMixedType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorAllMixedType.scala index f7d40ce089..f70b5b1a64 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorAllMixedType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorAllMixedType.scala @@ -43,6 +43,7 @@ object SmartConnectorAllMixedType { new File("ValidateSmartConnectorAllMixedType" + "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS TwentyTwenty") @@ -53,11 +54,17 @@ object SmartConnectorAllMixedType { snc.sql("CREATE TABLE IF NOT EXISTS TwentyTwenty USING COLUMN " + "AS (SELECT * FROM TempTwenty)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Mixed_Q1) snc.sql(ComplexTypeUtils.Mixed_Q2) snc.sql(ComplexTypeUtils.Mixed_Q3) snc.sql(ComplexTypeUtils.Mixed_Q4) snc.sql(ComplexTypeUtils.Mixed_Q5) + } if(printContent) { println("snc : Mixed_Q1 " + (snc.sql(ComplexTypeUtils.Mixed_Q1).show)) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayType.scala index 9b72c0e283..38909de2bb 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayType.scala @@ -44,6 +44,7 @@ object SmartConnectorArrayType { new File("ValidateSmartConnectorArrayType" + "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS Student") @@ -54,12 +55,18 @@ object SmartConnectorArrayType { "OPTIONS(path '" + dataLocation + "')") snc.sql("CREATE TABLE Student USING COLUMN AS (SELECT * FROM TempArray)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Array_Q1) snc.sql(ComplexTypeUtils.Array_Q2) snc.sql(ComplexTypeUtils.Array_Q3) snc.sql(ComplexTypeUtils.Array_View) snc.sql(ComplexTypeUtils.Array_Q4) snc.sql(ComplexTypeUtils.Array_Q5) + } if(printContent) { println(snc.sql(ComplexTypeUtils.Array_Q1).show()) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala new file mode 100644 index 0000000000..f4b07ee9e3 --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. 
+ */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} +import io.snappydata.hydra.SnappyTestUtils +import org.apache.spark.{SparkConf, SparkContext} +import org.apache.spark.sql.{SQLContext, SnappyContext, SparkSession} + +object SmartConnectorArrayTypeNULL { + def main(args: Array[String]): Unit = { + // scalastyle:off println + println("Smart Connector NULL value in ArraysType Job started...") + val connectionURL = args(args.length - 1) + println("Connection URL is : " + connectionURL) + val conf = new SparkConf() + .setAppName("Spark_ComplexType_ArrayTypeNULL_Validation") + .set("snappydata.connection", connectionURL) + val sc : SparkContext = SparkContext.getOrCreate(conf) + val snc : SnappyContext = SnappyContext(sc) + val spark : SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate() + val sqlContext = spark.sqlContext + + val pw : PrintWriter = new PrintWriter(new FileOutputStream( + new File("ValidateSmartConnectorArrayTypeNULL" + "_" + "column" + System.currentTimeMillis()) + , false)) + + /** + * Test : NULL value in Array Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + /** + * Test Case 1 : ArrayType Column is last column in the table. + */ + snc.sql(ComplexTypeUtils.createTableLastColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNullInLastColumn) + spark.sql(ComplexTypeUtils.insertNullInLastColumn) + snc.sql(ComplexTypeUtils.insertNormalDataLastColumn) + spark.sql(ComplexTypeUtils.insertNormalDataLastColumn) + /** + * Test Case 2 : ArrayType Column is between (say middle) the other data types in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrayTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNullInMiddleColumn) + spark.sql(ComplexTypeUtils.insertNullInMiddleColumn) + snc.sql(ComplexTypeUtils.insertNormalDataMiddleColumn) + spark.sql(ComplexTypeUtils.insertNormalDataMiddleColumn) + /** + * Test Case 3: ArrayType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnArrayType) + spark.sql(ComplexTypeUtils.createTableInSparkArrayTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNullInFirstColumn) + spark.sql(ComplexTypeUtils.insertNullInFirstColumn) + snc.sql(ComplexTypeUtils.insertNormalDataFirstColumn) + spark.sql(ComplexTypeUtils.insertNormalDataFirstColumn) + /** + * Validation Routine + */ + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectLastColumn, + "smartConnectorAQ1", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType last column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectMiddleColumn, + "smartConnectorAQ2", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType middle column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectFirstColumn, + "smartConnectorAQ3", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in ArrayType first column OK --") + pw.flush() + pw.close() + + snc.sql(ComplexTypeUtils.dropTableStudentLast) + snc.sql(ComplexTypeUtils.dropTableStudentMiddle) + snc.sql(ComplexTypeUtils.dropTableStudentMiddle) + spark.sql(ComplexTypeUtils.dropTableStudentLast) + spark.sql(ComplexTypeUtils.dropTableStudentMiddle) + spark.sql(ComplexTypeUtils.dropTableStudentLast) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + } +} diff --git 
a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStringInMapAsValue.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStringInMapAsValue.scala index d46120b53e..74ac09e5c5 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStringInMapAsValue.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStringInMapAsValue.scala @@ -44,6 +44,7 @@ object SmartConnectorArraysOfStringInMapAsValue { "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS TempFamousPeople") @@ -56,10 +57,16 @@ object SmartConnectorArraysOfStringInMapAsValue { val fp = snc.sql( "CREATE TABLE IF NOT EXISTS FamousPeople " + "USING COLUMN AS (SELECT * FROM TempFamousPeople)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Array_Map_TempView) snc.sql(ComplexTypeUtils.Array_Map_Q1) snc.sql(ComplexTypeUtils.Array_Map_Q2) snc.sql(ComplexTypeUtils.Array_Map_Q3) + } if(printContent) { println("snc : Array_Map_Q1 " + snc.sql(ComplexTypeUtils.Array_Map_Q1).show) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStructType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStructType.scala index 0917807591..d5e3e38388 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStructType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArraysOfStructType.scala @@ -44,6 +44,9 @@ object SmartConnectorArraysOfStructType { "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = true + // Keep the isExecute value true because in validation + // routine there is a Hydra Exception which needs to be fixed. 
+ val isExecute : Boolean = true /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS TwoWheeler") @@ -54,11 +57,13 @@ object SmartConnectorArraysOfStructType { snc.sql("CREATE TABLE IF NOT EXISTS TwoWheeler USING COLUMN " + "AS (SELECT * FROM TempBike)") + if(isExecute) { snc.sql(ComplexTypeUtils.ArraysOfStruct_Q1) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q2) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q3) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q4) snc.sql(ComplexTypeUtils.ArraysOfStruct_Q5) + } if(printContent) { println("snc : ArraysOfStruct_Q1 " + (snc.sql(ComplexTypeUtils.ArraysOfStruct_Q1).show())) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapType.scala index 00bc1b9a21..0c9e5d9454 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapType.scala @@ -43,6 +43,7 @@ object SmartConnectorMapType { new File("ValidateSmartConnectorMapType" + "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = false + val isExecute : Boolean = false snc.sql("DROP TABLE IF EXISTS TempStRecord") snc.sql("DROP TABLE IF EXISTS StudentMarksRecord") @@ -54,12 +55,18 @@ object SmartConnectorMapType { snc.sql("CREATE TABLE IF NOT EXISTS StudentMarksRecord USING COLUMN " + "AS (SELECT * FROM TempStRecord)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Map_Q1) snc.sql(ComplexTypeUtils.Map_Q2) snc.sql(ComplexTypeUtils.Map_Q3) snc.sql(ComplexTypeUtils.Map_Q4) snc.sql(ComplexTypeUtils.Map_Q5) snc.sql(ComplexTypeUtils.Map_Q6) + } if(printContent) { println("snc Map_Q1:" + snc.sql(ComplexTypeUtils.Map_Q1).show) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala new file mode 100644 index 0000000000..8232790ed7 --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. 
+ */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} + +import org.apache.spark.sql.{SnappyContext, SparkSession} +import org.apache.spark.{SparkConf, SparkContext} + +object SmartConnectorMapTypeNULL { + def main(args: Array[String]): Unit = { + // scalastyle:off println + println("Smart Connector NULL value in MapType Job started...") + val connectionURL = args(args.length - 1) + println("Connection URL is : " + connectionURL) + val conf = new SparkConf() + .setAppName("Spark_ComplexType_MapTypeNULL_Validation") + .set("snappydata.connection", connectionURL) + val sc : SparkContext = SparkContext.getOrCreate(conf) + val snc : SnappyContext = SnappyContext(sc) + val spark : SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate() + val sqlContext = spark.sqlContext + val pw : PrintWriter = new PrintWriter(new FileOutputStream( + new File("ValidateSmartConnectorMapTypeNULL" + "_" + "column" + System.currentTimeMillis()) + , false)) + /** + * Test : NULL value in Map Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + + /** + * Test Case 1 : MapType Column is last column in the table. + */ + snc.sql(ComplexTypeUtils.createTableLastColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeLast) + spark.sql(ComplexTypeUtils.insertNULLMapTypeLast) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeLast) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeLast) + /** + * Test Case 2 : MapType Column is between (say middle) the other data types in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeMiddle) + spark.sql(ComplexTypeUtils.insertNULLMapTypeMiddle) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle) + /** + * Test Case 3: ArrayType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnMapType) + spark.sql(ComplexTypeUtils.createTableInSparkMapTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNULLMapTypeFirst) + spark.sql(ComplexTypeUtils.insertNULLMapTypeFirst) + snc.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst) + spark.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst) + /** + * Validation Routine + */ + val snappyDFLast = snc.sql(ComplexTypeUtils.selectMapLast) + val sparkDFLast = spark.sql(ComplexTypeUtils.selectMapLast) + val df1Last = snappyDFLast.collect() + val df2Last = sparkDFLast.collect() + var result1 = df1Last.mkString(",") + var result2 = df2Last.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1Last.equals(df2Last)) { + pw.println("-- Insertion of NULL value in MapType last column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType last column OK --") + } + pw.flush() + val snappyDFMiddle = snc.sql(ComplexTypeUtils.selectMapMiddle) + val sparkDFMiddle = spark.sql(ComplexTypeUtils.selectMapMiddle) + val df1Middle = snappyDFMiddle.collect() + val df2Middle = sparkDFMiddle.collect() + result1 = df1Middle.mkString(",") + result2 = df2Middle.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1Middle.equals(df2Middle)) { + pw.println("-- Insertion of NULL value in MapType Middle column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType Middle column OK --") + } + pw.flush() + val snappyDFFirst = snc.sql(ComplexTypeUtils.selectMapFirst) + val sparkDFFirst = 
spark.sql(ComplexTypeUtils.selectMapFirst) + val df1First = snappyDFFirst.collect() + val df2First = sparkDFFirst.collect() + result1 = df1First.mkString(",") + result2 = df2First.mkString(",") + pw.println(result1) + pw.println(result2) + if(df1First.equals(df2First)) { + pw.println("-- Insertion of NULL value in MapType First column OK --") + } else { + pw.println("-- Insertion of NULL value in MapType First column OK --") + } + pw.flush() + pw.close() + snc.sql(ComplexTypeUtils.dropTableMapLast) + snc.sql(ComplexTypeUtils.dropTableMapMiddle) + snc.sql(ComplexTypeUtils.dropTableMapFirst) + spark.sql(ComplexTypeUtils.dropTableMapLast) + spark.sql(ComplexTypeUtils.dropTableMapMiddle) + spark.sql(ComplexTypeUtils.dropTableMapFirst) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + + } +} diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructType.scala index 5a8d81fced..cc3162357d 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructType.scala @@ -43,6 +43,7 @@ object SmartConnectorStructType { new File("ValidateSmartConnectorStructType" + "_" + "column" + System.currentTimeMillis()) , false)) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS CricketRecord") @@ -53,10 +54,16 @@ object SmartConnectorStructType { snc.sql("CREATE TABLE IF NOT EXISTS CricketRecord USING COLUMN " + "AS (SELECT * FROM TempCRRecord)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. 
+ */ + if(isExecute) { snc.sql(ComplexTypeUtils.Struct_Q1) snc.sql(ComplexTypeUtils.Struct_Q2) snc.sql(ComplexTypeUtils.Struct_Q3) snc.sql(ComplexTypeUtils.Struct_Q4) + } if(printContent) { println("Struct_Q1 : " + snc.sql(ComplexTypeUtils.Struct_Q1).show()) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructTypeNULL.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructTypeNULL.scala new file mode 100644 index 0000000000..ba773787a9 --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorStructTypeNULL.scala @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. 
+ */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} + +import io.snappydata.hydra.SnappyTestUtils +import org.apache.spark.{SparkConf, SparkContext} +import org.apache.spark.sql.{SnappyContext, SparkSession} + +object SmartConnectorStructTypeNULL { + def main(args: Array[String]): Unit = { + // scalastyle:off println + println("Smart Connector NULL value in StructType Job started...") + val connectionURL = args(args.length - 1) + println("Connection URL is : " + connectionURL) + val conf = new SparkConf() + .setAppName("Spark_ComplexType_StructTypeNULL_Validation") + .set("snappydata.connection", connectionURL) + val sc : SparkContext = SparkContext.getOrCreate(conf) + val snc : SnappyContext = SnappyContext(sc) + val spark : SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate() + val sqlContext = spark.sqlContext + val pw : PrintWriter = new PrintWriter(new FileOutputStream( + new File("ValidateSmartConnectorStructTypeNULL" + "_" + "column" + System.currentTimeMillis()) + , false)) + /** + * Test : NULL value in Map Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + /** + * Test Case 1 : Struct Type Column is last column in the table. + */ + snc.sql(ComplexTypeUtils.createTableLastColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeLast) + spark.sql(ComplexTypeUtils.insertNULLStructTypeLast) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeLast) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeLast) + /** + * Test Case 2 : StructType Column is between (say middle) the other data types in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeMiddle) + spark.sql(ComplexTypeUtils.insertNULLStructTypeMiddle) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeMiddle) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeMiddle) + /** + * Test Case 3: StructType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeFirst) + spark.sql(ComplexTypeUtils.insertNULLStructTypeFirst) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeFirst) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeFirst) + /** + * Validation Routine + * + */ + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructLast, + "SmartConnectorSQ1", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType last column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructMiddle, + "SmartConnectorSQ2", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType middle column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructFirst, + "SmartConnectorSQ3", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType first column OK --") + pw.flush() + pw.close() + snc.sql(ComplexTypeUtils.dropTableStructLast) + snc.sql(ComplexTypeUtils.dropTableStructMiddle) + snc.sql(ComplexTypeUtils.dropTableStructFirst) + spark.sql(ComplexTypeUtils.dropTableStructLast) + spark.sql(ComplexTypeUtils.dropTableStructMiddle) + spark.sql(ComplexTypeUtils.dropTableStructFirst) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + + } +} diff --git 
a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructType.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructType.scala index dfd00b00c2..75e18f0ffc 100644 --- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructType.scala +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructType.scala @@ -42,6 +42,7 @@ class StructType extends SnappySQLJob{ val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) val sqlContext = SQLContext.getOrCreate(sc) val printContent : Boolean = false + val isExecute : Boolean = false /* --- Snappy Job --- */ snc.sql("DROP TABLE IF EXISTS CricketRecord") @@ -52,10 +53,16 @@ class StructType extends SnappySQLJob{ snc.sql("CREATE TABLE IF NOT EXISTS CricketRecord USING COLUMN " + "AS (SELECT * FROM TempCRRecord)") + /** + * No need to execute below queries because validation routine runs it. + * Keep it here for ready reference. + */ + if(isExecute) { snc.sql(ComplexTypeUtils.Struct_Q1) snc.sql(ComplexTypeUtils.Struct_Q2) snc.sql(ComplexTypeUtils.Struct_Q3) snc.sql(ComplexTypeUtils.Struct_Q4) + } if(printContent) { println("Struct_Q1 : " + snc.sql(ComplexTypeUtils.Struct_Q1).show()) diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructTypeNULLValue.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructTypeNULLValue.scala new file mode 100644 index 0000000000..47d0caba5b --- /dev/null +++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/StructTypeNULLValue.scala @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you + * may not use this file except in compliance with the License. 
You + * may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + * implied. See the License for the specific language governing + * permissions and limitations under the License. See accompanying + * LICENSE file. + */ +package io.snappydata.hydra.complexdatatypes + +import java.io.{File, FileOutputStream, PrintWriter} + +import com.typesafe.config.Config +import io.snappydata.hydra.SnappyTestUtils +import org.apache.spark.sql._ + +class StructTypeNULLValue extends SnappySQLJob { + override def isValidJob(sc: SnappySession, config: Config): SnappyJobValidation = SnappyJobValid() + + override def runSnappyJob(snappySession: SnappySession, jobConfig: Config): Any = { + // scalastyle:off println + println("Validation for NULL Value in Struct Type column Job started...") + val snc : SnappyContext = snappySession.sqlContext + val spark : SparkSession = SparkSession.builder().enableHiveSupport().getOrCreate() + val sqlContext : SQLContext = spark.sqlContext + val outputFile = "ValidateStructTypeNULLValue" + "_" + + System.currentTimeMillis() + jobConfig.getString("logFileName") + val pw : PrintWriter = new PrintWriter(new FileOutputStream(new File(outputFile), false)) + /** + * Test : NULL value in Map Type column. + */ + snc.sql(ComplexTypeUtils.createSchemaST) + spark.sql(ComplexTypeUtils.createSchemaST) + /** + * Test Case 1 : Struct Type Column is last column in the table. 
+ */ + snc.sql(ComplexTypeUtils.createTableLastColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeLastColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeLast) + spark.sql(ComplexTypeUtils.insertNULLStructTypeLast) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeLast) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeLast) + /** + * Test Case 2 : StructType Column is between (say middle) the other data types in the table. + */ + snc.sql(ComplexTypeUtils.createTableMiddleColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeMiddleColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeMiddle) + spark.sql(ComplexTypeUtils.insertNULLStructTypeMiddle) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeMiddle) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeMiddle) + /** + * Test Case 3: StructType Column is the first column in the table. + */ + snc.sql(ComplexTypeUtils.createTableFirstColumnStructType) + spark.sql(ComplexTypeUtils.createTableInSparkStructTypeFirstColumn) + snc.sql(ComplexTypeUtils.insertNULLStructTypeFirst) + spark.sql(ComplexTypeUtils.insertNULLStructTypeFirst) + snc.sql(ComplexTypeUtils.insertNormalDataStructTypeFirst) + spark.sql(ComplexTypeUtils.insertNormalDataStructTypeFirst) + /** + * Validation Routine + * + */ + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructLast, + "SQ1", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType last column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructMiddle, + "SQ2", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType middle column OK --") + pw.flush() + SnappyTestUtils.assertQueryFullResultSet(snc, ComplexTypeUtils.selectStructFirst, + "SQ3", "column", pw, sqlContext) + pw.println("-- Insertion of NULL value in StructType first column OK --") + pw.flush() + pw.close() + 
snc.sql(ComplexTypeUtils.dropTableStructLast) + snc.sql(ComplexTypeUtils.dropTableStructMiddle) + snc.sql(ComplexTypeUtils.dropTableStructFirst) + spark.sql(ComplexTypeUtils.dropTableStructLast) + spark.sql(ComplexTypeUtils.dropTableStructMiddle) + spark.sql(ComplexTypeUtils.dropTableStructFirst) + snc.sql(ComplexTypeUtils.dropDatabaseST) + spark.sql(ComplexTypeUtils.dropDatabaseST) + } +} diff --git a/store b/store index fceceb8571..d0f2973663 160000 --- a/store +++ b/store @@ -1 +1 @@ -Subproject commit fceceb8571f6668d82275b5cee7380c84900ca7f +Subproject commit d0f297366377422145e53b6dadd8112242a58ed0