[Merge] ~fabiantoepfer/soss/+source/charmed-spark:dpe-3632-triage-cves into soss/+source/charmed-spark:lp-3.4.2
Fabian Toepfer has proposed merging ~fabiantoepfer/soss/+source/charmed-spark:dpe-3632-triage-cves into soss/+source/charmed-spark:lp-3.4.2.
Requested reviews:
Canonical Data Platform (data-platform)
For more details, see:
https://code.launchpad.net/~fabiantoepfer/soss/+source/charmed-spark/+git/charmed-spark/+merge/462812
--
Your team Canonical Data Platform is requested to review the proposed merge of ~fabiantoepfer/soss/+source/charmed-spark:dpe-3632-triage-cves into soss/+source/charmed-spark:lp-3.4.2.
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 8b4ef1d..18624c9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -1388,6 +1388,14 @@ private[spark] object SparkSubmitUtils extends Logging {
val ivySettings: IvySettings = new IvySettings
try {
ivySettings.load(file)
+ if (ivySettings.getDefaultIvyUserDir == null && ivySettings.getDefaultCache == null) {
+ // To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
+ // `processIvyPathArg` can overwrite these later.
+ val alternateIvyDir = System.getProperty("ivy.home",
+ System.getProperty("user.home") + File.separator + ".ivy2.5.2")
+ ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
+ ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
+ }
} catch {
case e @ (_: IOException | _: ParseException) =>
throw new SparkException(s"Failed when loading Ivy settings from $settingsFile", e)
@@ -1399,10 +1407,13 @@ private[spark] object SparkSubmitUtils extends Logging {
/* Set ivy settings for location of cache, if option is supplied */
private def processIvyPathArg(ivySettings: IvySettings, ivyPath: Option[String]): Unit = {
- ivyPath.filterNot(_.trim.isEmpty).foreach { alternateIvyDir =>
- ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
- ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
+ val alternateIvyDir = ivyPath.filterNot(_.trim.isEmpty).getOrElse {
+ // To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
+ System.getProperty("ivy.home",
+ System.getProperty("user.home") + File.separator + ".ivy2.5.2")
}
+ ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
+ ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
}
/* Add any optional additional remote repositories */
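Note for reviewers: the two hunks above resolve the alternate Ivy directory the same way — an explicitly configured path (spark.jars.ivy) wins, then the ivy.home system property, then <user.home>/.ivy2.5.2 — and then point both the Ivy user dir and the cache at it. A minimal sketch of that resolution order, assuming Ivy is on the classpath; resolveIvyDir is a hypothetical helper, while the IvySettings setters are the same calls used in the hunk:

    import java.io.File
    import org.apache.ivy.core.settings.IvySettings

    // Same resolution order as the patched processIvyPathArg: an explicitly
    // configured ivy path wins, then -Divy.home, then ~/.ivy2.5.2.
    def resolveIvyDir(ivyPath: Option[String]): String =
      ivyPath.filterNot(_.trim.isEmpty).getOrElse(
        System.getProperty("ivy.home",
          System.getProperty("user.home") + File.separator + ".ivy2.5.2"))

    val settings = new IvySettings
    val ivyDir = resolveIvyDir(None) // None: no explicit path was supplied
    settings.setDefaultIvyUserDir(new File(ivyDir))
    settings.setDefaultCache(new File(ivyDir, "cache"))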
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index be210cf..6adcf0d 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -2383,10 +2383,10 @@ package object config {
.doc("Path to specify the Ivy user directory, used for the local Ivy cache and " +
"package files from spark.jars.packages. " +
"This will override the Ivy property ivy.default.ivy.user.dir " +
- "which defaults to ~/.ivy2.")
+ "which defaults to ~/.ivy2.5.2")
.version("1.3.0")
.stringConf
- .createOptional
+ .createWithDefault("~/.ivy2.5.2")
private[spark] val JAR_IVY_SETTING_PATH =
ConfigBuilder("spark.jars.ivySettings")
diff --git a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
index 0dcdba3..50a7d97 100644
--- a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
@@ -378,7 +378,8 @@ private[deploy] object IvyTestUtils {
f(repo.toURI.toString)
} finally {
// Clean up
- if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2")) {
+ if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2") ||
+ repo.toString.contains(".ivy2.5.2")) {
val groupDir = getBaseGroupDirectory(artifact, useIvyLayout)
FileUtils.deleteDirectory(new File(repo, groupDir + File.separator + artifact.artifactId))
deps.foreach { _.foreach { dep =>
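Note on the guard above: since ".ivy2" is a literal prefix of ".ivy2.5.2", the pre-existing contains(".ivy2") clause already matches the new directory, so the added clause is redundant but harmless:

    ".ivy2.5.2/cache".contains(".ivy2")     // true — already covered by the existing check
    ".ivy2.5.2/cache".contains(".ivy2.5.2") // true — the newly added, redundant check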
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 b/dev/deps/spark-deps-hadoop-3-hive-2.3
index 4d94cb5..844118d 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -96,7 +96,7 @@ httpclient/4.5.14//httpclient-4.5.14.jar
httpcore/4.4.16//httpcore-4.4.16.jar
ini4j/0.5.4//ini4j-0.5.4.jar
istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
-ivy/2.5.1//ivy-2.5.1.jar
+ivy/2.5.2//ivy-2.5.2.jar
jackson-annotations/2.14.2//jackson-annotations-2.14.2.jar
jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
jackson-core/2.14.2//jackson-core-2.14.2.jar
diff --git a/dev/run-tests.py b/dev/run-tests.py
index dab3dcf..c0270a6 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -473,6 +473,8 @@ def main():
rm_r(os.path.join(SPARK_HOME, "work"))
rm_r(os.path.join(USER_HOME, ".ivy2", "local", "org.apache.spark"))
rm_r(os.path.join(USER_HOME, ".ivy2", "cache", "org.apache.spark"))
+ rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "local", "org.apache.spark"))
+ rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "cache", "org.apache.spark"))
os.environ["CURRENT_BLOCK"] = str(ERROR_CODES["BLOCK_GENERAL"])
diff --git a/pom.xml b/pom.xml
index a49574e..4f681d8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -139,14 +139,14 @@
<!-- note that this should be compatible with Kafka brokers version 0.10 and up -->
<kafka.version>3.3.2</kafka.version>
<!-- After 10.17.1.0, the minimum required version is JDK19 -->
- <derby.version>10.16.1.1</derby.version>
+ <derby.version>10.16.1.2</derby.version>
<parquet.version>1.13.1</parquet.version>
<orc.version>1.9.1</orc.version>
<orc.classifier>shaded-protobuf</orc.classifier>
<jetty.version>9.4.54.v20240208</jetty.version>
<jakartaservlet.version>4.0.3</jakartaservlet.version>
<chill.version>0.10.0</chill.version>
- <ivy.version>2.5.1</ivy.version>
+ <ivy.version>2.5.2</ivy.version>
<oro.version>2.0.8</oro.version>
<!--
If you changes codahale.metrics.version, you also need to change
@@ -188,7 +188,7 @@
<snappy.version>1.1.10.4</snappy.version>
<netlib.ludovic.dev.version>3.0.3</netlib.ludovic.dev.version>
<commons-codec.version>1.15</commons-codec.version>
- <commons-compress.version>1.22</commons-compress.version>
+ <commons-compress.version>1.26.1</commons-compress.version>
<commons-io.version>2.11.0</commons-io.version>
<!-- org.apache.commons/commons-lang/-->
<commons-lang2.version>2.6</commons-lang2.version>
@@ -203,7 +203,7 @@
<joda.version>2.12.2</joda.version>
<jodd.version>3.5.2</jodd.version>
<jsr305.version>3.0.0</jsr305.version>
- <libthrift.version>0.12.0</libthrift.version>
+ <libthrift.version>0.14.2</libthrift.version>
<!-- Please don't upgrade the version to 4.10+, it depends on JDK 11 -->
<antlr4.version>4.9.3</antlr4.version>
<jpam.version>1.1</jpam.version>
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
index 175412e..ef91f94 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/KerberosSaslHelper.java
@@ -30,6 +30,7 @@ import org.apache.thrift.TProcessor;
import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
public final class KerberosSaslHelper {
@@ -68,8 +69,8 @@ public final class KerberosSaslHelper {
new TSaslClientTransport("GSSAPI", null, names[0], names[1], saslProps, null,
underlyingTransport);
return new TSubjectAssumingTransport(saslTransport);
- } catch (SaslException se) {
- throw new IOException("Could not instantiate SASL transport", se);
+ } catch (SaslException | TTransportException se) {
+ throw new IOException("Could not instantiate transport", se);
}
}
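Note: the widened catch above appears to be needed because the bumped libthrift declares TTransportException on TSaslClientTransport construction. A minimal Scala sketch of the same pattern, assuming libthrift is on the classpath; wrapTransport is a hypothetical helper, and only the exception types come from the hunk above:

    import java.io.IOException
    import javax.security.sasl.SaslException
    import org.apache.thrift.transport.{TTransport, TTransportException}

    // Convert both checked exception types into the IOException callers expect.
    def wrapTransport(create: () => TTransport): TTransport =
      try create()
      catch {
        case e @ (_: SaslException | _: TTransportException) =>
          throw new IOException("Could not instantiate transport", e)
      }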
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java
index c06f6ec..5ac2995 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/PlainSaslHelper.java
@@ -38,6 +38,7 @@ import org.apache.thrift.TProcessorFactory;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
public final class PlainSaslHelper {
@@ -64,7 +65,7 @@ public final class PlainSaslHelper {
}
public static TTransport getPlainTransport(String username, String password,
- TTransport underlyingTransport) throws SaslException {
+ TTransport underlyingTransport) throws SaslException, TTransportException {
return new TSaslClientTransport("PLAIN", null, null, null, new HashMap<String, String>(),
new PlainCallbackHandler(username, password), underlyingTransport);
}
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
index 1205d21..b727b4e 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
@@ -45,11 +45,12 @@ public class TSetIpAddressProcessor<I extends Iface> extends TCLIService.Process
}
@Override
- public boolean process(final TProtocol in, final TProtocol out) throws TException {
+ public void process(final TProtocol in, final TProtocol out) throws TException {
setIpAddress(in);
setUserName(in);
try {
- return super.process(in, out);
+ super.process(in, out);
+ return;
} finally {
THREAD_LOCAL_USER_NAME.remove();
THREAD_LOCAL_IP_ADDRESS.remove();
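Note: the signature change above tracks the bumped libthrift, in which TProcessor.process returns void rather than boolean. A small Scala sketch of the surrounding pattern — per-request ThreadLocal state that is always cleared in a finally block; the object and method names are illustrative, only the ThreadLocal cleanup mirrors the Java class:

    // Per-request context stored in ThreadLocals and cleared unconditionally,
    // mirroring THREAD_LOCAL_USER_NAME / THREAD_LOCAL_IP_ADDRESS above.
    object RequestContext {
      private val userName = new ThreadLocal[String]
      private val ipAddress = new ThreadLocal[String]

      def processRequest(user: String, ip: String)(handle: => Unit): Unit = {
        userName.set(user)
        ipAddress.set(ip)
        try {
          handle            // delegate to the real processor
        } finally {
          userName.remove() // always clear, even if handle throws
          ipAddress.remove()
        }
      }
    }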
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
index a980b51..025c85e 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java
@@ -90,16 +90,10 @@ public class ThriftBinaryCLIService extends ThriftCLIService {
// Server args
int maxMessageSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_MESSAGE_SIZE);
- int requestTimeout = (int) hiveConf.getTimeVar(
- HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_TIMEOUT, TimeUnit.SECONDS);
- int beBackoffSlotLength = (int) hiveConf.getTimeVar(
- HiveConf.ConfVars.HIVE_SERVER2_THRIFT_LOGIN_BEBACKOFF_SLOT_LENGTH, TimeUnit.MILLISECONDS);
TThreadPoolServer.Args sargs = new TThreadPoolServer.Args(serverSocket)
.processorFactory(processorFactory).transportFactory(transportFactory)
.protocolFactory(new TBinaryProtocol.Factory())
.inputProtocolFactory(new TBinaryProtocol.Factory(true, true, maxMessageSize, maxMessageSize))
- .requestTimeout(requestTimeout).requestTimeoutUnit(TimeUnit.SECONDS)
- .beBackoffSlotLength(beBackoffSlotLength).beBackoffSlotLengthUnit(TimeUnit.MILLISECONDS)
.executorService(executorService);
// TCP Server
diff --git a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 3517df9..1c6f0de 100644
--- a/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/sql/hive-thriftserver/src/main/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -83,6 +83,16 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
public SessionHandle getSessionHandle() {
return sessionHandle;
}
+
+ @Override
+ public <T> T unwrap(Class<T> aClass) {
+ return null;
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> aClass) {
+ return false;
+ }
}
public ThriftCLIService(CLIService service, String serviceName) {