← Back to team overview

data-platform team mailing list archive

[Merge] soss/+source/hadoop:release-3.3.6-ubuntu3 into soss/+source/hadoop:lp-3.3.6

 

Fabian Toepfer has proposed merging soss/+source/hadoop:release-3.3.6-ubuntu3 into soss/+source/hadoop:lp-3.3.6.

Requested reviews:
  Canonical Data Platform (data-platform)

For more details, see:
https://code.launchpad.net/~data-platform/soss/+source/hadoop/+git/hadoop/+merge/476967
-- 
Your team Canonical Data Platform is requested to review the proposed merge of soss/+source/hadoop:release-3.3.6-ubuntu3 into soss/+source/hadoop:lp-3.3.6.
diff --git a/LICENSE-binary b/LICENSE-binary
index ed04c84..39dcfb8 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -213,8 +213,7 @@ com.aliyun:aliyun-java-sdk-core:4.5.10
 com.aliyun:aliyun-java-sdk-kms:2.11.0
 com.aliyun:aliyun-java-sdk-ram:3.1.0
 com.aliyun:aliyun-java-sdk-sts:3.0.0
-com.aliyun.oss:aliyun-sdk-oss:3.13.0
-com.amazonaws:aws-java-sdk-bundle:1.12.367
+com.aliyun.oss:aliyun-sdk-oss:3.13.2
 com.cedarsoftware:java-util:1.9.0
 com.cedarsoftware:json-io:2.5.1
 com.fasterxml.jackson.core:jackson-annotations:2.12.7
@@ -242,7 +241,7 @@ com.google.guava:guava:jar:30.1.1-jre
 com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.google.j2objc:j2objc-annotations:1.3
 com.microsoft.azure:azure-storage:7.0.1
-com.nimbusds:nimbus-jose-jwt:9.8.1
+com.nimbusds:nimbus-jose-jwt:9.37.2
 com.yammer.metrics:metrics-core:2.2.0
 com.zaxxer:HikariCP-java7:2.4.12
 commons-beanutils:commons-beanutils:1.9.4
@@ -302,7 +301,7 @@ io.swagger:swagger-annotations:1.5.4
 javax.inject:javax.inject:1
 net.java.dev.jna:jna:5.2.0
 net.minidev:accessors-smart:2.4.7
-org.apache.avro:avro:1.9.2
+org.apache.avro:avro:1.11.4
 org.apache.commons:commons-collections4:4.2
 org.apache.commons:commons-compress:1.26.1
 org.apache.commons:commons-configuration2:2.10.1
@@ -364,6 +363,7 @@ org.objenesis:objenesis:2.6
 org.xerial.snappy:snappy-java:1.1.8.2
 org.yaml:snakeyaml:2.0
 org.wildfly.openssl:wildfly-openssl:1.1.3.Final
+software.amazon.awssdk:bundle:jar:2.23.5
 
 
 --------------------------------------------------------------------------------
@@ -380,7 +384,7 @@ hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/util/tree
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/compat/{fstatat|openat|unlinkat}.h
 
 com.github.luben:zstd-jni:1.4.9-1
-dnsjava:dnsjava:2.1.7
+dnsjava:dnsjava:3.6.1
 org.codehaus.woodstox:stax2-api:4.2.1
 
 
diff --git a/NOTICE-binary b/NOTICE-binary
index 1969f71..478dd09 100644
--- a/NOTICE-binary
+++ b/NOTICE-binary
@@ -66,7 +66,7 @@ available from http://www.digip.org/jansson/.
 
 
 AWS SDK for Java
-Copyright 2010-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+Copyright 2010-2024 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 
 This product includes software developed by
 Amazon Technologies, Inc (http://www.amazon.com/).
diff --git a/hadoop-client-modules/hadoop-client-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh b/hadoop-client-modules/hadoop-client-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
index 2e92740..3a7c5ce 100644
--- a/hadoop-client-modules/hadoop-client-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
+++ b/hadoop-client-modules/hadoop-client-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh
@@ -51,6 +51,8 @@ allowed_expr+="|^[^-]*-default.xml$"
 allowed_expr+="|^[^-]*-version-info.properties$"
 #   * Hadoop's application classloader properties file.
 allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
+# Comes from dnsjava, not sure if relocatable.
+allowed_expr+="|^messages.properties$"
 # public suffix list used by httpcomponents
 allowed_expr+="|^mozilla/$"
 allowed_expr+="|^mozilla/public-suffix-list.txt$"
diff --git a/hadoop-client-modules/hadoop-client-runtime/pom.xml b/hadoop-client-modules/hadoop-client-runtime/pom.xml
index 59fbb7b..6b0d9a0 100644
--- a/hadoop-client-modules/hadoop-client-runtime/pom.xml
+++ b/hadoop-client-modules/hadoop-client-runtime/pom.xml
@@ -237,6 +237,8 @@
                         <exclude>jnamed*</exclude>
                         <exclude>lookup*</exclude>
                         <exclude>update*</exclude>
+                        <exclude>META-INF/versions/21/*</exclude>
+                        <exclude>META-INF/versions/21/**/*</exclude>
                       </excludes>
                     </filter>
                     <filter>
@@ -251,6 +253,7 @@
                       <excludes>
                         <exclude>META-INF/versions/9/module-info.class</exclude>
                         <exclude>META-INF/versions/11/module-info.class</exclude>
+                        <exclude>META-INF/versions/21/module-info.class</exclude>
                       </excludes>
                     </filter>
 
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 0f3dc7a..1cd9f0c 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -40,7 +40,7 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop.thirdparty</groupId>
-      <artifactId>hadoop-shaded-protobuf_3_7</artifactId>
+      <artifactId>hadoop-shaded-protobuf_3_25</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 3b20138..0da1997 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -623,11 +623,8 @@ public final class SecurityUtil {
     private List<String> searchDomains = new ArrayList<>();
     {
       ResolverConfig resolverConfig = ResolverConfig.getCurrentConfig();
-      Name[] names = resolverConfig.searchPath();
-      if (names != null) {
-        for (Name name : names) {
-          searchDomains.add(name.toString());
-        }
+      for (Name name : resolverConfig.searchPath()) {
+        searchDomains.add(name.toString());
       }
     }
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/ConfigConstants.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/ConfigConstants.java
new file mode 100644
index 0000000..0a6c5a7
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/ConfigConstants.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.constants;
+
+/**
+ * Evolving config constants class used in various hadoop tests.
+ */
+public final class ConfigConstants {
+
+  private ConfigConstants() {}
+
+ /**
+  * System property name for the avro dependency.
+  * This property is used to configure trusted packages,
+  * which the avro dependency can use for serialization.
+  */
+  public static final String CONFIG_AVRO_SERIALIZABLE_PACKAGES =
+      "org.apache.avro.SERIALIZABLE_PACKAGES";
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/package-info.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/package-info.java
new file mode 100644
index 0000000..19ad181
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/constants/package-info.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Evolving config constants class used in various hadoop tests.
+ */
+package org.apache.hadoop.constants;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
index e775d1d..4204faa 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
@@ -17,6 +17,7 @@
  */
 
 package org.apache.hadoop.fs;
+
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -30,6 +31,7 @@ import java.net.URISyntaxException;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.constants.ConfigConstants;
 import org.apache.hadoop.io.AvroTestUtil;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
@@ -404,6 +406,8 @@ public class TestPath {
   
   @Test (timeout = 30000)
   public void testAvroReflect() throws Exception {
+    // Avro expects explicitly stated, trusted packages used for (de-)serialization
+    System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.fs");
     AvroTestUtil.testReflect
       (new Path("foo"),
        "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
index 0fb20ac..3ac9289 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
@@ -23,6 +23,8 @@ import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
 import java.util.Random;
+
+import org.apache.hadoop.constants.ConfigConstants;
 import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
 import org.junit.Test;
@@ -337,6 +339,8 @@ public class TestText {
 
   @Test
   public void testAvroReflect() throws Exception {
+    // Avro expects explicitly stated, trusted packages used for (de-)serialization
+    System.setProperty(ConfigConstants.CONFIG_AVRO_SERIALIZABLE_PACKAGES, "org.apache.hadoop.io");
     AvroTestUtil.testReflect
             (new Text("foo"),
                     "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
index b2d2a8d..97e281b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
@@ -33,7 +33,7 @@ public class TestAvroSerialization {
   @Test
   public void testSpecific() throws Exception {
     AvroRecord before = new AvroRecord();
-    before.intField = 5;
+    before.setIntField(5);
     AvroRecord after = SerializationTestUtil.testSerialization(conf, before);
     assertEquals(before, after);
   }
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index eeee581..06d4d59 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -75,7 +75,6 @@ import java.net.NetworkInterface;
 import java.net.Socket;
 import java.net.SocketAddress;
 import java.net.SocketException;
-import java.net.UnknownHostException;
 import java.nio.BufferUnderflowException;
 import java.nio.ByteBuffer;
 import java.nio.channels.DatagramChannel;
@@ -87,8 +86,10 @@ import java.security.PrivateKey;
 import java.security.spec.InvalidKeySpecException;
 import java.security.spec.RSAPrivateKeySpec;
 import java.text.SimpleDateFormat;
+import java.time.Duration;
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
 import java.util.ArrayList;
-import java.util.Calendar;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Enumeration;
@@ -232,13 +233,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       } catch (SocketException e) {
       }
       ResolverConfig.refresh();
-      ExtendedResolver resolver;
-      try {
-        resolver = new ExtendedResolver();
-      } catch (UnknownHostException e) {
-        LOG.error("Can not resolve DNS servers: ", e);
-        return;
-      }
+      ExtendedResolver resolver = new ExtendedResolver();
       for (Resolver check : resolver.getResolvers()) {
         if (check instanceof SimpleResolver) {
           InetAddress address = ((SimpleResolver) check).getAddress()
@@ -247,7 +242,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
             resolver.deleteResolver(check);
             continue;
           } else {
-            check.setTimeout(30);
+            check.setTimeout(Duration.ofSeconds(30));
           }
         } else {
           LOG.error("Not simple resolver!!!?" + check);
@@ -260,12 +255,10 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       }
       StringBuilder message = new StringBuilder();
       message.append("DNS servers: ");
-      if (ResolverConfig.getCurrentConfig().servers() != null) {
-        for (String server : ResolverConfig.getCurrentConfig()
-            .servers()) {
-          message.append(server);
-          message.append(" ");
-        }
+      for (InetSocketAddress address :
+          ResolverConfig.getCurrentConfig().servers()) {
+        message.append(address);
+        message.append(" ");
       }
       LOG.info(message.toString());
     }
@@ -331,11 +324,10 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
     if (isDNSSECEnabled()) {
       Collection<Zone> zoneCollection = zones.values();
       for (Zone zone : zoneCollection) {
-        Iterator itor = zone.iterator();
+        Iterator<RRset> itor = zone.iterator();
         while (itor.hasNext()) {
-          RRset rRset = (RRset) itor.next();
-          Iterator sigs = rRset.sigs();
-          if (!sigs.hasNext()) {
+          RRset rRset = itor.next();
+          if (rRset.sigs().isEmpty()) {
             try {
               signSiteRecord(zone, rRset.first());
             } catch (DNSSEC.DNSSECException e) {
@@ -692,10 +684,8 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       throws DNSSEC.DNSSECException {
     RRset rrset = zone.findExactMatch(record.getName(),
         record.getType());
-    Calendar cal = Calendar.getInstance();
-    Date inception = cal.getTime();
-    cal.add(Calendar.YEAR, 1);
-    Date expiration = cal.getTime();
+    Instant inception = Instant.now();
+    Instant expiration = inception.plus(365, ChronoUnit.DAYS);
     RRSIGRecord rrsigRecord =
         DNSSEC.sign(rrset, dnsKeyRecs.get(zone.getOrigin()),
             privateKey, inception, expiration);
@@ -1159,7 +1149,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
           }
         }
         if (r.getType() == Type.CNAME) {
-          Name cname = ((CNAMERecord) r).getAlias();
+          Name cname = ((CNAMERecord) r).getTarget();
           if (iterations < 6) {
             remoteLookup(response, cname, type, iterations + 1);
           }
@@ -1255,9 +1245,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
    * @param flags the flags.
    */
   private void addAdditional2(Message response, int section, int flags) {
-    Record[] records = response.getSectionArray(section);
-    for (int i = 0; i < records.length; i++) {
-      Record r = records[i];
+    for (Record r : response.getSection(section)) {
       Name glueName = r.getAdditionalName();
       if (glueName != null) {
         addGlue(response, glueName, flags);
@@ -1403,11 +1391,10 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
           response.getHeader().setFlag(Flags.AA);
         }
       } else if (sr.isSuccessful()) {
-        RRset[] rrsets = sr.answers();
+        List<RRset> rrsets = sr.answers();
         LOG.info("found answers {}", rrsets);
-        for (int i = 0; i < rrsets.length; i++) {
-          addRRset(name, response, rrsets[i],
-              Section.ANSWER, flags);
+        for (RRset rrset : rrsets) {
+          addRRset(name, response, rrset, Section.ANSWER, flags);
         }
         addNS(response, zone, flags);
         if (iterations == 0) {
@@ -1456,7 +1443,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
   private void addNXT(Message response, int flags)
       throws DNSSEC.DNSSECException, IOException {
     Record nxtRecord = getNXTRecord(
-        response.getSectionArray(Section.QUESTION)[0]);
+        response.getSection(Section.QUESTION).get(0));
     Zone zone = findBestZone(nxtRecord.getName());
     addRecordCommand.exec(zone, nxtRecord);
     RRset nxtRR = zone.findExactMatch(nxtRecord.getName(), Type.NXT);
@@ -1515,9 +1502,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       }
     }
     if ((flags & FLAG_SIGONLY) == 0) {
-      Iterator it = rrset.rrs();
-      while (it.hasNext()) {
-        Record r = (Record) it.next();
+      for (Record r : rrset.rrs()) {
         if (r.getName().isWild() && !name.isWild()) {
           r = r.withName(name);
         }
@@ -1525,9 +1510,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
       }
     }
     if ((flags & (FLAG_SIGONLY | FLAG_DNSSECOK)) != 0) {
-      Iterator it = rrset.sigs();
-      while (it.hasNext()) {
-        Record r = (Record) it.next();
+      for (Record r : rrset.sigs()) {
         if (r.getName().isWild() && !name.isWild()) {
           r = r.withName(name);
         }
@@ -1554,13 +1537,13 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
     if (zone == null) {
       return errorMessage(query, Rcode.REFUSED);
     }
-    Iterator it = zone.AXFR();
+    Iterator<RRset> it = zone.AXFR();
     try {
       DataOutputStream dataOut;
       dataOut = new DataOutputStream(s.getOutputStream());
       int id = query.getHeader().getID();
       while (it.hasNext()) {
-        RRset rrset = (RRset) it.next();
+        RRset rrset = it.next();
         Message response = new Message(id);
         Header header = response.getHeader();
         header.setFlag(Flags.QR);
@@ -1568,7 +1551,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
         addRRset(rrset.getName(), response, rrset,
             Section.ANSWER, FLAG_DNSSECOK);
         if (tsig != null) {
-          tsig.applyStream(response, qtsig, first);
+          tsig.apply(response, qtsig, first);
           qtsig = response.getTSIG();
         }
         first = false;
@@ -1688,10 +1671,8 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
           zone.addRecord(record);
           LOG.info("Registered {}", record);
           if (isDNSSECEnabled()) {
-            Calendar cal = Calendar.getInstance();
-            Date inception = cal.getTime();
-            cal.add(Calendar.YEAR, 1);
-            Date expiration = cal.getTime();
+            Instant inception = Instant.now();
+            Instant expiration = inception.plus(365, ChronoUnit.DAYS);
             RRset rRset =
                 zone.findExactMatch(record.getName(), record.getType());
             try {
@@ -1700,7 +1681,7 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
                   DNSSEC.sign(rRset, dnskeyRecord, privateKey,
                       inception, expiration);
               LOG.info("Adding {}", rrsigRecord);
-              rRset.addRR(rrsigRecord);
+              zone.addRecord(rrsigRecord);
 
               //addDSRecord(zone, record.getName(), record.getDClass(),
               //  record.getTTL(), inception, expiration);
@@ -1727,8 +1708,8 @@ public class RegistryDNS extends AbstractService implements DNSOperations,
      */
     private void addDSRecord(Zone zone,
         Name name, int dClass, long dsTtl,
-        Date inception,
-        Date expiration) throws DNSSEC.DNSSECException {
+        Instant inception,
+        Instant expiration) throws DNSSEC.DNSSECException {
       RRset rRset;
       RRSIGRecord rrsigRecord;
 
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/SecureableZone.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/SecureableZone.java
index 4b0a852..c2f6532 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/SecureableZone.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/SecureableZone.java
@@ -138,8 +138,7 @@ public class SecureableZone extends Zone {
     SetResponse sr = zone.findRecords(base.getName(), Type.ANY);
     BitSet bitMap = new BitSet();
     bitMap.set(Type.NXT);
-    RRset[] rRsets = sr.answers();
-    for (RRset rRset : rRsets) {
+    for (RRset rRset : sr.answers()) {
       int typeCode = rRset.getType();
       if (typeCode > 0 && typeCode < 128) {
         bitMap.set(typeCode);
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
index a0c4ca3..386cb3a 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/server/dns/TestRegistryDNS.java
@@ -51,8 +51,9 @@ import java.net.InetAddress;
 import java.security.KeyFactory;
 import java.security.PrivateKey;
 import java.security.spec.RSAPrivateKeySpec;
-import java.util.Calendar;
-import java.util.Date;
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import static org.apache.hadoop.registry.client.api.RegistryConstants.*;
@@ -194,34 +195,37 @@ public class TestRegistryDNS extends Assert {
         "/registry/users/root/services/org-apache-slider/test1/", record);
 
     // start assessing whether correct records are available
-    Record[] recs = assertDNSQuery("test1.root.dev.test.");
+    List<Record> recs = assertDNSQuery("test1.root.dev.test.");
     assertEquals("wrong result", "192.168.1.5",
-        ((ARecord) recs[0]).getAddress().getHostAddress());
+        ((ARecord) recs.get(0)).getAddress().getHostAddress());
 
     recs = assertDNSQuery("management-api.test1.root.dev.test.", 2);
     assertEquals("wrong target name", "test1.root.dev.test.",
-        ((CNAMERecord) recs[0]).getTarget().toString());
-    assertTrue("not an ARecord", recs[isSecure() ? 2 : 1] instanceof ARecord);
+        ((CNAMERecord) recs.get(0)).getTarget().toString());
+    assertTrue("not an ARecord",
+        recs.get(isSecure() ? 2 : 1) instanceof ARecord);
 
     recs = assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.",
         Type.SRV, 1);
-    assertTrue("not an SRV record", recs[0] instanceof SRVRecord);
-    assertEquals("wrong port", 1026, ((SRVRecord) recs[0]).getPort());
+    assertTrue("not an SRV record", recs.get(0) instanceof SRVRecord);
+    assertEquals("wrong port", 1026, ((SRVRecord) recs.get(0)).getPort());
 
     recs = assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.", 2);
     assertEquals("wrong target name", "test1.root.dev.test.",
-        ((CNAMERecord) recs[0]).getTarget().toString());
-    assertTrue("not an ARecord", recs[isSecure() ? 2 : 1] instanceof ARecord);
+        ((CNAMERecord) recs.get(0)).getTarget().toString());
+    assertTrue("not an ARecord",
+        recs.get(isSecure() ? 2 : 1) instanceof ARecord);
 
     recs = assertDNSQuery("http-api.test1.root.dev.test.", 2);
     assertEquals("wrong target name", "test1.root.dev.test.",
-        ((CNAMERecord) recs[0]).getTarget().toString());
-    assertTrue("not an ARecord", recs[isSecure() ? 2 : 1] instanceof ARecord);
+        ((CNAMERecord) recs.get(0)).getTarget().toString());
+    assertTrue("not an ARecord",
+        recs.get(isSecure() ? 2 : 1) instanceof ARecord);
 
     recs = assertDNSQuery("http-api.test1.root.dev.test.", Type.SRV,
         1);
-    assertTrue("not an SRV record", recs[0] instanceof SRVRecord);
-    assertEquals("wrong port", 1027, ((SRVRecord) recs[0]).getPort());
+    assertTrue("not an SRV record", recs.get(0) instanceof SRVRecord);
+    assertEquals("wrong port", 1027, ((SRVRecord) recs.get(0)).getPort());
 
     assertDNSQuery("test1.root.dev.test.", Type.TXT, 3);
     assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.", Type.TXT, 1);
@@ -239,13 +243,13 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs =
+    List<Record> recs =
         assertDNSQuery("ctr-e50-1451931954322-0016-01-000002.dev.test.");
     assertEquals("wrong result", "172.17.0.19",
-        ((ARecord) recs[0]).getAddress().getHostAddress());
+        ((ARecord) recs.get(0)).getAddress().getHostAddress());
 
     recs = assertDNSQuery("httpd-1.test1.root.dev.test.", 1);
-    assertTrue("not an ARecord", recs[0] instanceof ARecord);
+    assertTrue("not an ARecord", recs.get(0) instanceof ARecord);
   }
 
   @Test
@@ -277,16 +281,16 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs = assertDNSQuery(
+    List<Record> recs = assertDNSQuery(
         "ctr-e50-1451931954322-0016-01-000002.dev.test.");
     assertEquals("wrong result", "172.17.0.19",
-        ((ARecord) recs[0]).getAddress().getHostAddress());
-    assertEquals("wrong ttl", 30L, recs[0].getTTL());
+        ((ARecord) recs.get(0)).getAddress().getHostAddress());
+    assertEquals("wrong ttl", 30L, recs.get(0).getTTL());
 
     recs = assertDNSQuery("httpd-1.test1.root.dev.test.", 1);
-    assertTrue("not an ARecord", recs[0] instanceof ARecord);
+    assertTrue("not an ARecord", recs.get(0) instanceof ARecord);
 
-    assertEquals("wrong ttl", 30L, recs[0].getTTL());
+    assertEquals("wrong ttl", 30L, recs.get(0).getTTL());
   }
 
   @Test
@@ -299,10 +303,11 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs = assertDNSQuery("19.0.17.172.in-addr.arpa.", Type.PTR, 1);
+    List<Record> recs = assertDNSQuery(
+        "19.0.17.172.in-addr.arpa.", Type.PTR, 1);
     assertEquals("wrong result",
         "httpd-1.test1.root.dev.test.",
-        ((PTRRecord) recs[0]).getTarget().toString());
+        ((PTRRecord) recs.get(0)).getTarget().toString());
   }
 
   @Test
@@ -325,10 +330,11 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs = assertDNSQuery("19.0.17.172.in-addr.arpa.", Type.PTR, 1);
+    List<Record> recs = assertDNSQuery(
+        "19.0.17.172.in-addr.arpa.", Type.PTR, 1);
     assertEquals("wrong result",
         "httpd-1.test1.root.dev.test.",
-        ((PTRRecord) recs[0]).getTarget().toString());
+        ((PTRRecord) recs.get(0)).getTarget().toString());
   }
 
   @Test
@@ -344,7 +350,7 @@ public class TestRegistryDNS extends Assert {
     Name name = Name.fromString("19.1.17.172.in-addr.arpa.");
     Record question = Record.newRecord(name, Type.PTR, DClass.IN);
     Message query = Message.newQuery(question);
-    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO, null);
+    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO);
     query.addRecord(optRecord, Section.ADDITIONAL);
     byte[] responseBytes = getRegistryDNS().generateReply(query, null);
     Message response = new Message(responseBytes);
@@ -372,21 +378,21 @@ public class TestRegistryDNS extends Assert {
     assertEquals("wrong status", Rcode.NXDOMAIN, response.getRcode());
   }
 
-  private Record[] assertDNSQuery(String lookup) throws IOException {
+  private List<Record> assertDNSQuery(String lookup) throws IOException {
     return assertDNSQuery(lookup, Type.A, 1);
   }
 
-  private Record[] assertDNSQuery(String lookup, int numRecs)
+  private List<Record> assertDNSQuery(String lookup, int numRecs)
       throws IOException {
     return assertDNSQuery(lookup, Type.A, numRecs);
   }
 
-  Record[] assertDNSQuery(String lookup, int type, int numRecs)
+  private List<Record> assertDNSQuery(String lookup, int type, int numRecs)
       throws IOException {
     Name name = Name.fromString(lookup);
     Record question = Record.newRecord(name, type, DClass.IN);
     Message query = Message.newQuery(question);
-    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO, null);
+    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO);
     query.addRecord(optRecord, Section.ADDITIONAL);
     byte[] responseBytes = getRegistryDNS().generateReply(query, null);
     Message response = new Message(responseBytes);
@@ -394,9 +400,9 @@ public class TestRegistryDNS extends Assert {
     assertNotNull("Null response", response);
     assertEquals("Questions do not match", query.getQuestion(),
         response.getQuestion());
-    Record[] recs = response.getSectionArray(Section.ANSWER);
+    List<Record> recs = response.getSection(Section.ANSWER);
     assertEquals("wrong number of answer records",
-        isSecure() ? numRecs * 2 : numRecs, recs.length);
+        isSecure() ? numRecs * 2 : numRecs, recs.size());
     if (isSecure()) {
       boolean signed = false;
       for (Record record : recs) {
@@ -410,12 +416,12 @@ public class TestRegistryDNS extends Assert {
     return recs;
   }
 
-  Record[] assertDNSQueryNotNull(String lookup, int type, int answerCount)
-      throws IOException {
+  private List<Record> assertDNSQueryNotNull(
+      String lookup, int type, int answerCount) throws IOException {
     Name name = Name.fromString(lookup);
     Record question = Record.newRecord(name, type, DClass.IN);
     Message query = Message.newQuery(question);
-    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO, null);
+    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO);
     query.addRecord(optRecord, Section.ADDITIONAL);
     byte[] responseBytes = getRegistryDNS().generateReply(query, null);
     Message response = new Message(responseBytes);
@@ -423,9 +429,9 @@ public class TestRegistryDNS extends Assert {
     assertNotNull("Null response", response);
     assertEquals("Questions do not match", query.getQuestion(),
         response.getQuestion());
-    Record[] recs = response.getSectionArray(Section.ANSWER);
-    assertEquals(answerCount, recs.length);
-    assertEquals(recs[0].getType(), type);
+    List<Record> recs = response.getSection(Section.ANSWER);
+    assertEquals(answerCount, recs.size());
+    assertEquals(type, recs.get(0).getType());
     return recs;
   }
 
@@ -461,10 +467,8 @@ public class TestRegistryDNS extends Assert {
 
     ARecord aRecord = new ARecord(Name.fromString("some.test."), DClass.IN, 0,
         InetAddress.getByName("192.168.0.1"));
-    Calendar cal = Calendar.getInstance();
-    Date inception = cal.getTime();
-    cal.add(Calendar.YEAR, 1);
-    Date expiration = cal.getTime();
+    Instant inception = Instant.now();
+    Instant expiration = inception.plus(365, ChronoUnit.DAYS);
     RRset rrset = new RRset(aRecord);
     RRSIGRecord rrsigRecord = DNSSEC.sign(rrset,
         dnskeyRecord,
@@ -495,13 +499,13 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs = assertDNSQuery(
+    List<Record> recs = assertDNSQuery(
         "ctr-e50-1451931954322-0016-01-000002.dev.test.", Type.AAAA, 1);
     assertEquals("wrong result", "172.17.0.19",
-        ((AAAARecord) recs[0]).getAddress().getHostAddress());
+        ((AAAARecord) recs.get(0)).getAddress().getHostAddress());
 
     recs = assertDNSQuery("httpd-1.test1.root.dev.test.", Type.AAAA, 1);
-    assertTrue("not an ARecord", recs[0] instanceof AAAARecord);
+    assertTrue("not an ARecord", recs.get(0) instanceof AAAARecord);
   }
 
   @Test
@@ -524,9 +528,9 @@ public class TestRegistryDNS extends Assert {
     assertNotNull("Null response", response);
     assertEquals("Questions do not match", query.getQuestion(),
         response.getQuestion());
-    Record[] sectionArray = response.getSectionArray(Section.AUTHORITY);
+    List<Record> sectionArray = response.getSection(Section.AUTHORITY);
     assertEquals("Wrong number of recs in AUTHORITY", isSecure() ? 2 : 1,
-        sectionArray.length);
+        sectionArray.size());
     boolean soaFound = false;
     for (Record rec : sectionArray) {
       soaFound = rec.getType() == Type.SOA;
@@ -570,31 +574,31 @@ public class TestRegistryDNS extends Assert {
         record);
 
     // start assessing whether correct records are available
-    Record[] recs =
+    List<Record> recs =
         assertDNSQuery("ctr-e50-1451931954322-0016-01-000002.dev.test.");
     assertEquals("wrong result", "172.17.0.19",
-        ((ARecord) recs[0]).getAddress().getHostAddress());
+        ((ARecord) recs.get(0)).getAddress().getHostAddress());
 
     recs = assertDNSQuery("httpd-1.test1.root.dev.test.", 1);
-    assertTrue("not an ARecord", recs[0] instanceof ARecord);
+    assertTrue("not an ARecord", recs.get(0) instanceof ARecord);
 
     // lookup dyanmic reverse records
     recs = assertDNSQuery("19.0.17.172.in-addr.arpa.", Type.PTR, 1);
     assertEquals("wrong result",
         "httpd-1.test1.root.dev.test.",
-        ((PTRRecord) recs[0]).getTarget().toString());
+        ((PTRRecord) recs.get(0)).getTarget().toString());
 
     // now lookup static reverse records
     Name name = Name.fromString("5.0.17.172.in-addr.arpa.");
     Record question = Record.newRecord(name, Type.PTR, DClass.IN);
     Message query = Message.newQuery(question);
-    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO, null);
+    OPTRecord optRecord = new OPTRecord(4096, 0, 0, Flags.DO);
     query.addRecord(optRecord, Section.ADDITIONAL);
     byte[] responseBytes = getRegistryDNS().generateReply(query, null);
     Message response = new Message(responseBytes);
-    recs = response.getSectionArray(Section.ANSWER);
+    recs = response.getSection(Section.ANSWER);
     assertEquals("wrong result", "cn005.dev.test.",
-        ((PTRRecord) recs[0]).getTarget().toString());
+        ((PTRRecord) recs.get(0)).getTarget().toString());
   }
 
   @Test
@@ -655,8 +659,7 @@ public class TestRegistryDNS extends Assert {
     getRegistryDNS().initializeZones(conf);
 
     // start assessing whether correct records are available
-    Record[] recs =
-        assertDNSQueryNotNull("mail.yahoo.com.", Type.CNAME, 1);
+    assertDNSQueryNotNull("mail.yahoo.com.", Type.CNAME, 1);
   }
 
   @Test
@@ -672,8 +675,7 @@ public class TestRegistryDNS extends Assert {
     getRegistryDNS().initializeZones(conf);
 
     // start assessing whether correct records are available
-    Record[] recs =
-        assertDNSQueryNotNull(".", Type.NS, 13);
+    assertDNSQueryNotNull(".", Type.NS, 13);
   }
 
   @Test
@@ -692,10 +694,10 @@ public class TestRegistryDNS extends Assert {
         record2);
 
     // start assessing whether correct records are available
-    Record[] recs =
+    List<Record> recs =
         assertDNSQuery("httpd.test1.root.dev.test.", 2);
-    assertTrue("not an ARecord", recs[0] instanceof ARecord);
-    assertTrue("not an ARecord", recs[1] instanceof ARecord);
+    assertTrue("not an ARecord", recs.get(0) instanceof ARecord);
+    assertTrue("not an ARecord", recs.get(1) instanceof ARecord);
   }
 
   @Test(timeout=5000)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
index 66f3781..9a5ff77 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapreduce.jobhistory;
 
 import java.util.Set;
 
-import org.apache.avro.util.Utf8;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
@@ -31,8 +30,8 @@ public class JobQueueChangeEvent implements HistoryEvent {
   private JobQueueChange datum = new JobQueueChange();
   
   public JobQueueChangeEvent(JobID id, String queueName) {
-    datum.jobid = new Utf8(id.toString());
-    datum.jobQueueName = new Utf8(queueName);
+    datum.setJobid(id.toString());
+    datum.setJobQueueName(queueName);
   }
   
   JobQueueChangeEvent() { }
@@ -54,13 +53,14 @@ public class JobQueueChangeEvent implements HistoryEvent {
   
   /** Get the Job ID */
   public JobID getJobId() {
-    return JobID.forName(datum.jobid.toString());
+    return JobID.forName(datum.getJobid().toString());
   }
   
   /** Get the new Job queue name */
   public String getJobQueueName() {
-    if (datum.jobQueueName != null) {
-      return datum.jobQueueName.toString();
+    java.lang.CharSequence jobQueueName = datum.getJobQueueName();
+    if (jobQueueName != null) {
+      return jobQueueName.toString();
     }
     return null;
   }
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 35067ca..e21a91f 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -63,7 +63,7 @@
     <java.security.egd>file:///dev/urandom</java.security.egd>
 
     <!-- avro version -->
-    <avro.version>1.9.2</avro.version>
+    <avro.version>1.11.4</avro.version>
 
     <!-- jersey version -->
     <jersey.version>1.19.4</jersey.version>
@@ -89,10 +89,10 @@
     <!--Protobuf version for backward compatibility-->
     <protobuf.version>2.5.0</protobuf.version>
     <!-- ProtocolBuffer version, actually used in Hadoop -->
-    <hadoop.protobuf.version>3.7.1</hadoop.protobuf.version>
+    <hadoop.protobuf.version>3.23.4</hadoop.protobuf.version>
     <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
 
-    <hadoop-thirdparty.version>1.1.1</hadoop-thirdparty.version>
+    <hadoop-thirdparty.version>1.3.0</hadoop-thirdparty.version>
     <hadoop-thirdparty-protobuf.version>${hadoop-thirdparty.version}</hadoop-thirdparty-protobuf.version>
     <hadoop-thirdparty-guava.version>${hadoop-thirdparty.version}</hadoop-thirdparty-guava.version>
     <hadoop-thirdparty-shaded-prefix>org.apache.hadoop.thirdparty</hadoop-thirdparty-shaded-prefix>
@@ -102,7 +102,7 @@
     <zookeeper.version>3.6.3</zookeeper.version>
     <curator.version>5.2.0</curator.version>
     <findbugs.version>3.0.5</findbugs.version>
-    <dnsjava.version>2.1.7</dnsjava.version>
+    <dnsjava.version>3.6.0</dnsjava.version>
 
     <guava.version>27.0-jre</guava.version>
     <guice.version>4.0</guice.version>
@@ -185,7 +185,7 @@
     <exec-maven-plugin.version>1.3.1</exec-maven-plugin.version>
     <make-maven-plugin.version>1.0-beta-1</make-maven-plugin.version>
     <surefire.fork.timeout>900</surefire.fork.timeout>
-    <aws-java-sdk.version>1.12.367</aws-java-sdk.version>
+    <aws-java-sdk.version>1.12.720</aws-java-sdk.version>
     <hsqldb.version>2.7.1</hsqldb.version>
     <frontend-maven-plugin.version>1.11.2</frontend-maven-plugin.version>
     <jasmine-maven-plugin.version>2.1</jasmine-maven-plugin.version>
@@ -211,7 +211,7 @@
     <solr.version>8.8.2</solr.version>
     <openssl-wildfly.version>1.1.3.Final</openssl-wildfly.version>
     <woodstox.version>5.4.0</woodstox.version>
-    <nimbus-jose-jwt.version>9.8.1</nimbus-jose-jwt.version>
+    <nimbus-jose-jwt.version>9.37.2</nimbus-jose-jwt.version>
     <nodejs.version>v12.22.1</nodejs.version>
     <yarnpkg.version>v1.22.5</yarnpkg.version>
     <apache-ant.version>1.10.13</apache-ant.version>
@@ -255,7 +255,7 @@
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop.thirdparty</groupId>
-        <artifactId>hadoop-shaded-protobuf_3_7</artifactId>
+        <artifactId>hadoop-shaded-protobuf_3_25</artifactId>
         <version>${hadoop-thirdparty-protobuf.version}</version>
       </dependency>
       <dependency>
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
index d2ed9ed..a306596 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
@@ -1137,7 +1137,7 @@ as it may take a couple of SDK updates before it is ready.
 1. Identify the latest AWS SDK [available for download](https://aws.amazon.com/sdk-for-java/).
 1. Create a private git branch of trunk for JIRA, and in
   `hadoop-project/pom.xml` update the `aws-java-sdk.version` to the new SDK version.
-1. Update AWS SDK versions in NOTICE.txt.
+1. Update AWS SDK versions in NOTICE.txt and LICENSE.binary
 1. Do a clean build and rerun all the `hadoop-aws` tests.
   This includes the `-Pscale` set, with a role defined for the assumed role tests.
   in `fs.s3a.assumed.role.arn` for testing assumed roles,
@@ -1159,11 +1159,18 @@ your IDE or via maven.
   `mvn dependency:tree -Dverbose > target/dependencies.txt`.
   Examine the `target/dependencies.txt` file to verify that no new
   artifacts have unintentionally been declared as dependencies
-  of the shaded `aws-java-sdk-bundle` artifact.
+  of the shaded `software.amazon.awssdk:bundle:jar` artifact.
 1. Run a full AWS-test suite with S3 client-side encryption enabled by
  setting `fs.s3a.encryption.algorithm` to 'CSE-KMS' and setting up AWS-KMS
   Key ID in `fs.s3a.encryption.key`.
 
+The dependency chain of the `hadoop-aws` module should be similar to this, albeit
+with different version numbers:
+```
+[INFO] +- org.apache.hadoop:hadoop-aws:jar:3.4.0-SNAPSHOT:compile
+[INFO] |  +- software.amazon.awssdk:bundle:jar:2.23.5:compile
+[INFO] |  \- org.wildfly.openssl:wildfly-openssl:jar:1.1.3.Final:compile
+```
 ### Basic command line regression testing
 
 We need a run through of the CLI to see if there have been changes there
@@ -1384,5 +1391,5 @@ Don't be surprised if this happens, don't worry too much, and,
 while that rollback option is there to be used, ideally try to work forwards.
 
 If the problem is with the SDK, file issues with the
- [AWS SDK Bug tracker](https://github.com/aws/aws-sdk-java/issues).
+ [AWS V2 SDK Bug tracker](https://github.com/aws/aws-sdk-java-v2/issues).
 If the problem can be fixed or worked around in the Hadoop code, do it there too.
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index 1213e6a..603b248 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -460,7 +460,7 @@ public class JobBuilder {
     }
     task.setFinishTime(event.getFinishTime());
     task.setTaskStatus(getPre21Value(event.getTaskStatus()));
-    task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
+    task.incorporateCounters(((TaskFinished) event.getDatum()).getCounters());
   }
 
   private void processTaskFailedEvent(TaskFailedEvent event) {
@@ -472,7 +472,7 @@ public class JobBuilder {
     task.setFinishTime(event.getFinishTime());
     task.setTaskStatus(getPre21Value(event.getTaskStatus()));
     TaskFailed t = (TaskFailed)(event.getDatum());
-    task.putDiagnosticInfo(t.error.toString());
+    task.putDiagnosticInfo(t.getError().toString());
     // killed task wouldn't have failed attempt.
     if (t.getFailedDueToAttempt() != null) {
       task.putFailedDueToAttemptId(t.getFailedDueToAttempt().toString());
@@ -542,7 +542,7 @@ public class JobBuilder {
     }
     attempt.setFinishTime(event.getFinishTime());
     attempt
-        .incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
+        .incorporateCounters(((TaskAttemptFinished) event.getDatum()).getCounters());
   }
 
   private void processReduceAttemptFinishedEvent(
@@ -568,7 +568,7 @@ public class JobBuilder {
     attempt.setShuffleFinished(event.getShuffleFinishTime());
     attempt.setSortFinished(event.getSortFinishTime());
     attempt
-        .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
+        .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).getCounters());
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -596,7 +596,7 @@ public class JobBuilder {
     // is redundant, but making this will add future-proofing.
     attempt.setFinishTime(event.getFinishTime());
     attempt
-      .incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
+      .incorporateCounters(((MapAttemptFinished) event.getDatum()).getCounters());
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -661,11 +661,11 @@ public class JobBuilder {
 
     JobFinished job = (JobFinished)event.getDatum();
     Map<String, Long> countersMap =
-        JobHistoryUtils.extractCounters(job.totalCounters);
+        JobHistoryUtils.extractCounters(job.getTotalCounters());
     result.putTotalCounters(countersMap);
-    countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
+    countersMap = JobHistoryUtils.extractCounters(job.getMapCounters());
     result.putMapCounters(countersMap);
-    countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
+    countersMap = JobHistoryUtils.extractCounters(job.getReduceCounters());
     result.putReduceCounters(countersMap);
   }
 
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
index 6ae87bb..34ef95f 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
@@ -157,9 +157,9 @@ public class JobHistoryUtils {
   static Map<String, Long> extractCounters(JhCounters counters) {
     Map<String, Long> countersMap = new HashMap<String, Long>();
     if (counters != null) {
-      for (JhCounterGroup group : counters.groups) {
-        for (JhCounter counter : group.counts) {
-          countersMap.put(counter.name.toString(), counter.value);
+      for (JhCounterGroup group : counters.getGroups()) {
+        for (JhCounter counter : group.getCounts()) {
+          countersMap.put(counter.getName().toString(), counter.getValue());
         }
       }
     }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 4ae33a7..2308e58 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -268,11 +268,11 @@ public class LoggedTask implements DeepCompare {
       String counterName) {
     counterName = canonicalizeCounterName(counterName);
 
-    for (JhCounterGroup group : counters.groups) {
-      for (JhCounter counter : group.counts) {
+    for (JhCounterGroup group : counters.getGroups()) {
+      for (JhCounter counter : group.getCounts()) {
         if (counterName
-            .equals(canonicalizeCounterName(counter.name.toString()))) {
-          thunk.set(counter.value);
+            .equals(canonicalizeCounterName(counter.getName().toString()))) {
+          thunk.set(counter.getValue());
           return;
         }
       }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index 5c6abd3..c4ca962 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -636,11 +636,11 @@ public class LoggedTaskAttempt implements DeepCompare {
       String counterName) {
     counterName = canonicalizeCounterName(counterName);
 
-    for (JhCounterGroup group : counters.groups) {
-      for (JhCounter counter : group.counts) {
+    for (JhCounterGroup group : counters.getGroups()) {
+      for (JhCounter counter : group.getCounts()) {
         if (counterName
-            .equals(canonicalizeCounterName(counter.name.toString()))) {
-          thunk.set(counter.value);
+            .equals(canonicalizeCounterName(counter.getName().toString()))) {
+          thunk.set(counter.getValue());
           return;
         }
       }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
index 4acde43..4f0f2a0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml
@@ -51,7 +51,7 @@
       <exclusions>
         <exclusion>
           <groupId>org.apache.hadoop.thirdparty</groupId>
-          <artifactId>hadoop-shaded-protobuf_3_7</artifactId>
+          <artifactId>hadoop-shaded-protobuf_3_25</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
@@ -69,7 +69,7 @@
 
     <dependency>
       <groupId>org.apache.hadoop.thirdparty</groupId>
-      <artifactId>hadoop-shaded-protobuf_3_7</artifactId>
+      <artifactId>hadoop-shaded-protobuf_3_25</artifactId>
     </dependency>
 
     <dependency>
@@ -80,7 +80,7 @@
       <exclusions>
         <exclusion>
           <groupId>org.apache.hadoop.thirdparty</groupId>
-          <artifactId>hadoop-shaded-protobuf_3_7</artifactId>
+          <artifactId>hadoop-shaded-protobuf_3_25</artifactId>
         </exclusion>
       </exclusions>
     </dependency>

Follow ups