
Commit 4cd3ffe: Resolved conflicts

Merge commit: 2 parents 5e2e4d9 + 44966c9
viktorluc-db committed Jan 24, 2025

Showing 464 changed files with 3,122 additions and 2,256 deletions.
----------------------------------------
@@ -19,12 +19,13 @@

import org.apache.spark.network.buffer.ManagedBuffer;

+import java.io.IOException;
import java.nio.channels.WritableByteChannel;

/**
* A channel for writing data which is fetched to disk, which allows access to the written data only
* after the writer has been closed. Used with DownloadFile and DownloadFileManager.
*/
public interface DownloadFileWritableChannel extends WritableByteChannel {
-  ManagedBuffer closeAndRead();
+  ManagedBuffer closeAndRead() throws IOException;
}
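With this change, closeAndRead() closes the underlying channel itself and surfaces close failures as a checked IOException, instead of handing back a buffer over a possibly half-flushed file. A minimal caller sketch with a hypothetical finishDownload helper (the real call sites live in Spark's shuffle fetch path):

```scala
import java.io.IOException

import org.apache.spark.network.buffer.ManagedBuffer
import org.apache.spark.network.shuffle.DownloadFileWritableChannel

object DownloadFinalizer {
  // Hypothetical helper: finalize a fetched-to-disk download. closeAndRead()
  // now closes the channel and may throw, so the failure reaches the caller.
  @throws[IOException]
  def finishDownload(channel: DownloadFileWritableChannel): ManagedBuffer =
    try {
      channel.closeAndRead()
    } finally {
      // Defensive: if closeAndRead threw before closing, release the channel.
      if (channel.isOpen) channel.close()
    }
}
```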
----------------------------------------
@@ -69,7 +69,8 @@ private class SimpleDownloadWritableChannel implements DownloadFileWritableChannel
}

@Override
-    public ManagedBuffer closeAndRead() {
+    public ManagedBuffer closeAndRead() throws IOException {
+      channel.close();
      return new FileSegmentManagedBuffer(transportConf, file, 0, file.length());
    }

----------------------------------------
@@ -0,0 +1,48 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.network.shuffle;

import org.apache.spark.network.util.MapConfigProvider;
import org.apache.spark.network.util.TransportConf;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.IOException;

import org.junit.jupiter.api.Assertions;

public class SimpleDownloadFileSuite {
  @Test
  public void testChannelIsClosedAfterCloseAndRead() throws IOException {
    File tempFile = File.createTempFile("testChannelIsClosed", ".tmp");
    tempFile.deleteOnExit();
    TransportConf conf = new TransportConf("test", MapConfigProvider.EMPTY);

    DownloadFile downloadFile = null;
    try {
      downloadFile = new SimpleDownloadFile(tempFile, conf);
      DownloadFileWritableChannel channel = downloadFile.openForWriting();
      channel.closeAndRead();
      Assertions.assertFalse(channel.isOpen(), "Channel should be closed after closeAndRead.");
    } finally {
      if (downloadFile != null) {
        downloadFile.delete();
      }
    }
  }
}
----------------------------------------
common/utils/src/main/resources/error/error-conditions.json (23 additions, 0 deletions)
@@ -3135,6 +3135,24 @@
    ],
    "sqlState" : "42836"
  },
+  "INVALID_RECURSIVE_REFERENCE" : {
+    "message" : [
+      "Invalid recursive reference found inside WITH RECURSIVE clause."
+    ],
+    "subClass" : {
+      "NUMBER" : {
+        "message" : [
+          "Multiple self-references to one recursive CTE are not allowed."
+        ]
+      },
+      "PLACE" : {
+        "message" : [
+          "Recursive references cannot be used on the right side of left outer/semi/anti joins, on the left side of right outer joins, in full outer joins, in aggregates, and in subquery expressions."
+        ]
+      }
+    },
+    "sqlState" : "42836"
+  },
  "INVALID_REGEXP_REPLACE" : {
    "message" : [
      "Could not perform regexp_replace for source = \"<source>\", pattern = \"<pattern>\", replacement = \"<replacement>\" and position = <position>."
@@ -5259,6 +5277,11 @@
"Resilient Distributed Datasets (RDDs)."
]
},
"REGISTER_UDAF" : {
"message" : [
"Registering User Defined Aggregate Functions (UDAFs)."
]
},
"SESSION_BASE_RELATION_TO_DATAFRAME" : {
"message" : [
"Invoking SparkSession 'baseRelationToDataFrame'. This is server side developer API"
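For illustration, here is a query of the shape the new PLACE sub-condition describes: the self-reference sits inside an aggregate, so the analyzer should raise INVALID_RECURSIVE_REFERENCE.PLACE rather than attempt a fixpoint. This is a sketch assuming recursive CTE support is enabled in this build; the CTE name is arbitrary.

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").getOrCreate()

// The recursive reference to `t` appears under MAX, one of the placements
// the PLACE sub-condition forbids, so analysis is expected to fail.
spark.sql("""
  WITH RECURSIVE t(n) AS (
    SELECT 1
    UNION ALL
    SELECT MAX(n) + 1 FROM t
  )
  SELECT * FROM t
""").show()
```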
----------------------------------------
@@ -25,7 +25,7 @@ import java.util.UUID

import scala.jdk.CollectionConverters._

-import org.apache.avro.{AvroTypeException, Schema, SchemaBuilder}
+import org.apache.avro.{AvroTypeException, Schema, SchemaBuilder, SchemaFormatter}
import org.apache.avro.Schema.{Field, Type}
import org.apache.avro.Schema.Type._
import org.apache.avro.file.{DataFileReader, DataFileWriter}
@@ -86,7 +86,7 @@ abstract class AvroSuite
}

  def getAvroSchemaStringFromFiles(filePath: String): String = {
-    new DataFileReader({
+    val schema = new DataFileReader({
      val file = new File(filePath)
      if (file.isFile) {
        file
@@ -96,7 +96,8 @@ abstract class AvroSuite
          .filter(_.getName.endsWith("avro"))
          .head
      }
-    }, new GenericDatumReader[Any]()).getSchema.toString(false)
+    }, new GenericDatumReader[Any]()).getSchema
+    SchemaFormatter.format(AvroUtils.JSON_INLINE_FORMAT, schema)
}

// Check whether an Avro schema of union type is converted to SQL in an expected way, when the
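The test helper above swaps Schema.toString(false) for Avro's SchemaFormatter API. A standalone sketch of the equivalence, assuming the format name behind Spark's AvroUtils.JSON_INLINE_FORMAT is Avro's inline JSON format, written here as "json/inline":

```scala
import org.apache.avro.{Schema, SchemaFormatter}

val schema: Schema = new Schema.Parser().parse(
  """{"type": "record", "name": "Person", "fields": [{"name": "age", "type": "int"}]}""")

// Legacy API: compact single-line JSON.
val legacy = schema.toString(false)
// Formatter API used by the diff; "json/inline" is our assumed format name.
val inline = SchemaFormatter.format("json/inline", schema)
assert(legacy == inline)
```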
----------------------------------------
@@ -31,8 +31,8 @@ import ammonite.util.Util.newLine

import org.apache.spark.SparkBuildInfo.spark_version
import org.apache.spark.annotation.DeveloperApi
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.SparkSession.withLocalConnectServer
+import org.apache.spark.sql.connect.SparkSession
+import org.apache.spark.sql.connect.SparkSession.withLocalConnectServer
import org.apache.spark.sql.connect.client.{SparkConnectClient, SparkConnectClientParser}

/**
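Most of the remaining files in this commit follow the same pattern as the hunk above: the Spark Connect Scala client moved from org.apache.spark.sql into org.apache.spark.sql.connect, and its test utilities from org.apache.spark.sql.test into org.apache.spark.sql.connect.test, so only package declarations and imports change. A before/after sketch, assuming the builder API itself is untouched:

```scala
// Before this commit, Connect client code imported:
//   import org.apache.spark.sql.SparkSession
// After:
import org.apache.spark.sql.connect.SparkSession

// Usage is unchanged; `remote` points the client at a Connect endpoint
// (sc://localhost:15002 is the default local server address).
val spark = SparkSession.builder().remote("sc://localhost:15002").getOrCreate()
```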

----------------------------------------
This file was deleted.
----------------------------------------
@@ -28,7 +28,7 @@
import static org.apache.spark.sql.functions.*;
import static org.apache.spark.sql.RowFactory.create;
import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.sql.test.SparkConnectServerUtils;
+import org.apache.spark.sql.connect.test.SparkConnectServerUtils;
import org.apache.spark.sql.types.StructType;

/**
----------------------------------------
@@ -18,8 +18,8 @@
package org.apache.spark.sql

import org.apache.spark.SparkRuntimeException
+import org.apache.spark.sql.connect.test.{QueryTest, RemoteSparkSession}
import org.apache.spark.sql.functions._
-import org.apache.spark.sql.test.{QueryTest, RemoteSparkSession}

class DataFrameSubquerySuite extends QueryTest with RemoteSparkSession {
import testImplicits._
----------------------------------------
@@ -17,8 +17,8 @@

package org.apache.spark.sql

+import org.apache.spark.sql.connect.test.{QueryTest, RemoteSparkSession}
import org.apache.spark.sql.functions._
-import org.apache.spark.sql.test.{QueryTest, RemoteSparkSession}

class DataFrameTableValuedFunctionsSuite extends QueryTest with RemoteSparkSession {
import testImplicits._
----------------------------------------
@@ -38,12 +38,13 @@ import org.apache.spark.sql.avro.{functions => avroFn}
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.StringEncoder
import org.apache.spark.sql.catalyst.util.CollationFactory
+import org.apache.spark.sql.connect.{DataFrame, Dataset, SparkSession}
+import org.apache.spark.sql.connect.ConnectConversions._
import org.apache.spark.sql.connect.client.SparkConnectClient
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, IntegrationTestUtils}
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.protobuf.{functions => pbFn}
-import org.apache.spark.sql.test.{ConnectFunSuite, IntegrationTestUtils}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
import org.apache.spark.util.SparkFileUtils
----------------------------------------
@@ -17,7 +17,7 @@

package org.apache.spark.sql

-import org.apache.spark.sql.test.{QueryTest, RemoteSparkSession}
+import org.apache.spark.sql.connect.test.{QueryTest, RemoteSparkSession}
import org.apache.spark.unsafe.types.VariantVal

class SQLExpressionsSuite extends QueryTest with RemoteSparkSession {
----------------------------------------
@@ -25,7 +25,7 @@ import scala.util.Properties
import org.apache.commons.io.output.ByteArrayOutputStream
import org.scalatest.BeforeAndAfterEach

-import org.apache.spark.sql.test.{ConnectFunSuite, IntegrationTestUtils, RemoteSparkSession}
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, IntegrationTestUtils, RemoteSparkSession}
import org.apache.spark.tags.AmmoniteTest
import org.apache.spark.util.IvyTestUtils
import org.apache.spark.util.MavenUtils.MavenCoordinate
----------------------------------------
@@ -15,14 +15,15 @@
* limitations under the License.
*/

-package org.apache.spark.sql
+package org.apache.spark.sql.connect

import java.io.{File, FilenameFilter}

import org.apache.commons.io.FileUtils

import org.apache.spark.SparkException
-import org.apache.spark.sql.test.{ConnectFunSuite, RemoteSparkSession, SQLHelper}
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.connect.test.{ConnectFunSuite, RemoteSparkSession, SQLHelper}
import org.apache.spark.sql.types.{DoubleType, LongType, StructType}
import org.apache.spark.storage.StorageLevel

----------------------------------------
[Diff truncated: 464 changed files in total; only the files above are shown.]