data-platform team mailing list archive
-
data-platform team
-
Mailing list archive
-
Message #00249
[Merge] soss/+source/hadoop:release-3.3.6-ubuntu2 into soss/+source/hadoop:lp-3.3.6
Enrico Deusebio has proposed merging soss/+source/hadoop:release-3.3.6-ubuntu2 into soss/+source/hadoop:lp-3.3.6.
Requested reviews:
Paolo Sottovia (paolosottovia)
Canonical Data Platform (data-platform)
For more details, see:
https://code.launchpad.net/~data-platform/soss/+source/hadoop/+git/hadoop/+merge/471407
Release 3.3.6-ubuntu2, featuring the fixes from the following merge proposals (PRs):
* https://code.launchpad.net/~fabiantoepfer/soss/+source/hadoop/+git/hadoop/+merge/468616
* https://code.launchpad.net/~fabiantoepfer/soss/+source/hadoop/+git/hadoop/+merge/467993
Credit to ~fabiantoepfer for the amazing work there!
--
Your team Canonical Data Platform is requested to review the proposed merge of soss/+source/hadoop:release-3.3.6-ubuntu2 into soss/+source/hadoop:lp-3.3.6.
diff --git a/LICENSE-binary b/LICENSE-binary
index cbeec02..ed04c84 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -302,7 +302,7 @@ io.swagger:swagger-annotations:1.5.4
javax.inject:javax.inject:1
net.java.dev.jna:jna:5.2.0
net.minidev:accessors-smart:2.4.7
-org.apache.avro:avro:1.7.7
+org.apache.avro:avro:1.9.2
org.apache.commons:commons-collections4:4.2
org.apache.commons:commons-compress:1.26.1
org.apache.commons:commons-configuration2:2.10.1
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
index 1d62c0a..3a9be12 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsJsonBuilder.java
@@ -21,8 +21,9 @@ package org.apache.hadoop.metrics2;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
index 31fe3c6..060874a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
@@ -103,7 +103,10 @@ public class VersionInfo {
* @return the Hadoop version string, eg. "0.6.3-dev"
*/
public static String getVersion() {
- return COMMON_VERSION_INFO._getVersion();
+ String version = COMMON_VERSION_INFO._getVersion();
+ if (version.indexOf("-ubuntu") > -1)
+ return version.substring(0, version.indexOf("-ubuntu"));
+ return version;
}
/**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
index ec76ea0..9c9b75f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
@@ -41,7 +41,7 @@ public class AvroTestUtil {
// check that schema matches expected
Schema s = ReflectData.get().getSchema(type);
- assertEquals(Schema.parse(schema), s);
+ assertEquals(new Schema.Parser().parse(schema), s);
// check that value is serialized correctly
ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
index 5e71601..1145926 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
@@ -119,7 +119,7 @@ public class TestEnumSetWritable {
public void testAvroReflect() throws Exception {
String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\","
+ "\"name\":\"TestEnumSet\","
- + "\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\","
+ + "\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable\","
+ "\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},"
+ "\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
Type type =
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterClientRejectOverload.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterClientRejectOverload.java
index cc7f5a6..71ec747 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterClientRejectOverload.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterClientRejectOverload.java
@@ -50,7 +50,8 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.test.GenericTestUtils;
-import org.codehaus.jackson.map.ObjectMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
index 7e07718..cb4f021 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/ProvidedVolumeImpl.java
@@ -32,6 +32,10 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -60,10 +64,6 @@ import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
-import org.codehaus.jackson.map.ObjectWriter;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
@@ -371,14 +371,11 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
- private static final ObjectReader READER =
- new ObjectMapper().reader(ProvidedBlockIteratorState.class);
private static class ProvidedBlockIteratorState {
ProvidedBlockIteratorState() {
iterStartMs = Time.now();
lastSavedMs = iterStartMs;
- atEnd = false;
lastBlockId = -1L;
}
@@ -390,9 +387,6 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
@JsonProperty
private long iterStartMs;
- @JsonProperty
- private boolean atEnd;
-
// The id of the last block read when the state of the iterator is saved.
// This implementation assumes that provided blocks are returned
// in sorted order of the block ids.
diff --git a/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml b/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
index 4e459b6..8631ebb 100644
--- a/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
+++ b/hadoop-mapreduce-project/dev-support/findbugs-exclude.xml
@@ -291,6 +291,14 @@
<Bug pattern="EQ_COMPARETO_USE_OBJECT_EQUALS" />
</Match>
+ <!--
+ Ignore untidy code generated by Avro
+ -->
+ <Match>
+ <Class name="org.apache.hadoop.mapreduce.jobhistory.JobSubmitted" />
+ <Bug pattern="NP_NULL_INSTANCEOF" />
+ </Match>
+
<Match>
<Class name="org.apache.hadoop.mapred.Task" />
<Method name="reportFatalError" />
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 2f1ae8c..35067ca 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -63,7 +63,7 @@
<java.security.egd>file:///dev/urandom</java.security.egd>
<!-- avro version -->
- <avro.version>1.7.7</avro.version>
+ <avro.version>1.9.2</avro.version>
<!-- jersey version -->
<jersey.version>1.19.4</jersey.version>
@@ -908,6 +908,22 @@
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-jaxrs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-xc</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
index 9f1aa8e..369bb07 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
@@ -451,7 +451,7 @@ public final class AzureADAuthenticator {
long expiresOnInSecs = -1;
JsonFactory jf = new JsonFactory();
- JsonParser jp = jf.createJsonParser(httpResponseStream);
+ JsonParser jp = jf.createParser(httpResponseStream);
String fieldName, fieldValue;
jp.nextToken();
while (jp.hasCurrentToken()) {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
index a47720a..5cd8c22 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
@@ -505,7 +505,7 @@ public class AbfsHttpOperation implements AbfsPerfLoggable {
return;
}
JsonFactory jf = new JsonFactory();
- try (JsonParser jp = jf.createJsonParser(stream)) {
+ try (JsonParser jp = jf.createParser(stream)) {
String fieldName, fieldValue;
jp.nextToken(); // START_OBJECT - {
jp.nextToken(); // FIELD_NAME - "error":
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ListResultSchemaTest.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ListResultSchemaTest.java
index 8a33ea5..3f6a487 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ListResultSchemaTest.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/ListResultSchemaTest.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs.azurebfs.contract;
import java.io.IOException;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;
import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultEntrySchema;
diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java
index ee0810d..f6c8a6a 100644
--- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java
+++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/main/java/org/apache/hadoop/tools/dynamometer/DynoInfraUtils.java
@@ -54,9 +54,10 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
import org.slf4j.Logger;
@@ -484,7 +485,7 @@ public final class DynoInfraUtils {
final Set<String> dataNodesToReport = new HashSet<>();
JsonFactory fac = new JsonFactory();
- JsonParser parser = fac.createJsonParser(IOUtils
+ JsonParser parser = fac.createParser(IOUtils
.toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
int objectDepth = 0;
@@ -554,7 +555,7 @@ public final class DynoInfraUtils {
}
InputStream in = conn.getInputStream();
JsonFactory fac = new JsonFactory();
- JsonParser parser = fac.createJsonParser(in);
+ JsonParser parser = fac.createParser(in);
if (parser.nextToken() != JsonToken.START_OBJECT
|| parser.nextToken() != JsonToken.FIELD_NAME
|| !parser.getCurrentName().equals("beans")
diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
index 7a3e22b..3527d6b 100644
--- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/synthetic/SynthTraceJobProducer.java
@@ -17,6 +17,13 @@
*/
package org.apache.hadoop.yarn.sls.synthetic;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonFactoryBuilder;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.math3.distribution.AbstractRealDistribution;
@@ -30,18 +37,13 @@ import org.apache.hadoop.tools.rumen.JobStoryProducer;
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
-import static org.codehaus.jackson.JsonParser.Feature.INTERN_FIELD_NAMES;
-import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES;
+import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
/**
* This is a JobStoryProducer that operates from distribution of different
@@ -84,15 +86,16 @@ public class SynthTraceJobProducer implements JobStoryProducer {
this.conf = conf;
this.rand = new JDKRandomGenerator();
- ObjectMapper mapper = new ObjectMapper();
- mapper.configure(INTERN_FIELD_NAMES, true);
+ JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+ jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+ ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
FileSystem ifs = path.getFileSystem(conf);
FSDataInputStream fileIn = ifs.open(path);
// Initialize the random generator and the seed
- this.trace = mapper.readValue(fileIn, Trace.class);
+ this.trace = mapper.readValue(fileIn.getWrappedStream(), Trace.class);
this.seed = trace.rand_seed;
this.rand.setSeed(seed);
// Initialize the trace
@@ -538,9 +541,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
if(val!=null){
if(std==null){
// Constant
- if(dist!=null || discrete!=null || weights!=null){
- throw new JsonMappingException("Instantiation of " + Sample.class
- + " failed");
+ if (dist != null || discrete != null || weights != null) {
+ throw JsonMappingException
+ .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.CONST;
this.val = val;
@@ -550,9 +553,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
this.weights = null;
} else {
// Distribution
- if(discrete!=null || weights != null){
- throw new JsonMappingException("Instantiation of " + Sample.class
- + " failed");
+ if (discrete != null || weights != null) {
+ throw JsonMappingException
+ .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.DIST;
this.val = val;
@@ -563,9 +566,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
}
} else {
// Discrete
- if(discrete==null){
- throw new JsonMappingException("Instantiation of " + Sample.class
- + " failed");
+ if (discrete == null) {
+ throw JsonMappingException
+ .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.DISC;
this.val = 0;
@@ -576,9 +579,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
weights = new ArrayList<>(Collections.nCopies(
discrete.size(), 1.0));
}
- if(weights.size() != discrete.size()){
- throw new JsonMappingException("Instantiation of " + Sample.class
- + " failed");
+ if (weights.size() != discrete.size()) {
+ throw JsonMappingException
+ .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
this.weights = weights;
}
diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java
index 0792eec..14e7475 100644
--- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java
+++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java
@@ -19,11 +19,14 @@ package org.apache.hadoop.yarn.sls;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.hadoop.yarn.api.records.ExecutionType;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
import org.apache.hadoop.yarn.sls.synthetic.SynthTraceJobProducer;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonFactoryBuilder;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
@@ -32,12 +35,10 @@ import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Arrays;
+import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import static org.codehaus.jackson.JsonParser.Feature.INTERN_FIELD_NAMES;
-import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES;
-
/**
* Simple test class driving the {@code SynthTraceJobProducer}, and validating
* jobs produce are within expected range.
@@ -56,8 +57,9 @@ public class TestSynthJobGeneration {
+ "{\"time\": 60, \"weight\": 2}," + "{\"time\": 90, \"weight\": 1}"
+ "]}";
- ObjectMapper mapper = new ObjectMapper();
- mapper.configure(INTERN_FIELD_NAMES, true);
+ JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+ jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+ ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
SynthTraceJobProducer.Workload wl =
mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@@ -176,8 +178,9 @@ public class TestSynthJobGeneration {
@Test
public void testSample() throws IOException {
- ObjectMapper mapper = new ObjectMapper();
- mapper.configure(INTERN_FIELD_NAMES, true);
+ JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+ jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+ ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
JDKRandomGenerator rand = new JDKRandomGenerator();
@@ -235,7 +238,7 @@ public class TestSynthJobGeneration {
mapper.readValue(invalidDistJson, SynthTraceJobProducer.Sample.class);
Assert.fail();
} catch (JsonMappingException e) {
- Assert.assertTrue(e.getMessage().startsWith("Instantiation of"));
+ Assert.assertTrue(e.getMessage().startsWith("Cannot construct instance of"));
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
index c996225..91002e4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
@@ -28,11 +28,12 @@ import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ObjectNode;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
import org.slf4j.LoggerFactory;
import java.io.File;
@@ -97,13 +98,13 @@ public final class DockerClientConfigHandler {
// Parse the JSON and create the Tokens/Credentials.
ObjectMapper mapper = new ObjectMapper();
- JsonFactory factory = mapper.getJsonFactory();
- JsonParser parser = factory.createJsonParser(contents);
+ JsonFactory factory = mapper.getFactory();
+ JsonParser parser = factory.createParser(contents);
JsonNode rootNode = mapper.readTree(parser);
Credentials credentials = new Credentials();
if (rootNode.has(CONFIG_AUTHS_KEY)) {
- Iterator<String> iter = rootNode.get(CONFIG_AUTHS_KEY).getFieldNames();
+ Iterator<String> iter = rootNode.get(CONFIG_AUTHS_KEY).fieldNames();
for (; iter.hasNext();) {
String registryUrl = iter.next();
String registryCred = rootNode.get(CONFIG_AUTHS_KEY)
@@ -169,14 +170,14 @@ public final class DockerClientConfigHandler {
DockerCredentialTokenIdentifier ti =
(DockerCredentialTokenIdentifier) tk.decodeIdentifier();
ObjectNode registryCredNode = mapper.createObjectNode();
- registryUrlNode.put(ti.getRegistryUrl(), registryCredNode);
+ registryUrlNode.set(ti.getRegistryUrl(), registryCredNode);
registryCredNode.put(CONFIG_AUTH_KEY,
new String(tk.getPassword(), Charset.forName("UTF-8")));
LOG.debug("Prepared token for write: {}", tk);
}
}
if (foundDockerCred) {
- rootNode.put(CONFIG_AUTHS_KEY, registryUrlNode);
+ rootNode.set(CONFIG_AUTHS_KEY, registryUrlNode);
String json = mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(rootNode);
FileUtils.writeStringToFile(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
index 1f3dd09..faa2b29 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/pom.xml
@@ -143,14 +143,6 @@
<artifactId>jettison</artifactId>
</dependency>
<dependency>
- <groupId>com.sun.jersey</groupId>
- <artifactId>jersey-core</artifactId>
- </dependency>
- <dependency>
- <groupId>com.sun.jersey</groupId>
- <artifactId>jersey-client</artifactId>
- </dependency>
- <dependency>
<groupId>org.apache.hadoop.thirdparty</groupId>
<artifactId>hadoop-shaded-guava</artifactId>
</dependency>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java
index e59454b..2fff381 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkTagMappingJsonManager.java
@@ -25,15 +25,17 @@ import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.ObjectMapper;
/**
* The NetworkTagMapping JsonManager implementation.
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
index 707b5e6..d469309 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
@@ -62,8 +62,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.Contai
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.volume.csi.ContainerVolumePublisher;
import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerExecContext;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
import java.io.File;
import java.io.IOException;
@@ -81,6 +79,9 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_LAYER_MOUNTS_TO_KEEP;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
@@ -642,7 +643,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
if (envNode.isMissingNode()) {
return null;
}
- return mapper.readValue(envNode, List.class);
+ return mapper.readValue(envNode.traverse(), List.class);
}
@SuppressWarnings("unchecked")
@@ -653,7 +654,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
if (entrypointNode.isMissingNode()) {
return null;
}
- return mapper.readValue(entrypointNode, List.class);
+ return mapper.readValue(entrypointNode.traverse(), List.class);
}
private RuncContainerExecutorConfig createRuncContainerExecutorConfig(
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
index 629785d..fbec3ee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/ImageTagToManifestPlugin.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
-import org.codehaus.jackson.map.ObjectMapper;
import java.io.BufferedReader;
import java.io.File;
@@ -45,6 +44,8 @@ import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_CACHE_REFRESH_INTERVAL;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NUM_MANIFESTS_TO_CACHE;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java
index 88a01a2..3333b82 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/runc/RuncContainerExecutorConfig.java
@@ -20,12 +20,13 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc;
import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonRawValue;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
import java.util.List;
import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonRawValue;
+
/**
* This class is used by the
* {@link org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.RuncContainerRuntime}
@@ -35,7 +36,7 @@ import java.util.Map;
* a JSON object named ociRuntimeConfig that mirrors the
* OCI runtime specification.
*/
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_DEFAULT)
+@JsonInclude(JsonInclude.Include.NON_DEFAULT)
@InterfaceStability.Unstable
public class RuncContainerExecutorConfig {
final private String version;
@@ -164,7 +165,7 @@ public class RuncContainerExecutorConfig {
/**
* This class is a Java representation of an OCI image layer.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
@InterfaceStability.Unstable
public static class OCILayer {
final private String mediaType;
@@ -192,7 +193,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the OCI Runtime Specification.
*/
@InterfaceStability.Unstable
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCIRuntimeConfig {
final private OCIRootConfig root;
final private List<OCIMount> mounts;
@@ -254,7 +255,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the oci root config section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCIRootConfig {
public String getPath() {
return path;
@@ -281,7 +282,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the oci mount section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCIMount {
final private String destination;
final private String type;
@@ -329,7 +330,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the oci process section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCIProcessConfig {
final private boolean terminal;
final private ConsoleSize consoleSize;
@@ -422,7 +423,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the console size section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class ConsoleSize {
public int getHeight() {
return height;
@@ -450,7 +451,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the rlimits section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class RLimits {
public String getType() {
return type;
@@ -484,7 +485,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the capabilities section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Capabilities {
final private List<String> effective;
final private List<String> bounding;
@@ -554,7 +555,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the oci hooks section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCIHooksConfig {
final private List<HookType> prestart;
final private List<HookType> poststart;
@@ -587,7 +588,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the hook type section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class HookType {
final private String path;
final private List<String> args;
@@ -650,7 +651,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the oci linux config section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class OCILinuxConfig {
final private List<Namespace> namespaces;
final private List<IDMapping> uidMappings;
@@ -768,7 +769,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the idmapping section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class IDMapping {
final private int containerID;
final private int hostID;
@@ -802,7 +803,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the device section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Device {
final private String type;
final private String path;
@@ -861,7 +862,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the resources section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Resources {
final private List<Device> device;
final private Memory memory;
@@ -927,7 +928,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the device section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Device {
final private boolean allow;
final private String type;
@@ -973,7 +974,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the memory section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Memory {
final private long limit;
final private long reservation;
@@ -1032,7 +1033,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the cpu section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class CPU {
final private long quota;
final private long period;
@@ -1092,7 +1093,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the blockio section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class BlockIO {
final private int weight;
final private int leafWeight;
@@ -1153,7 +1154,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the weight device section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class WeightDevice {
final private long major;
final private long minor;
@@ -1193,7 +1194,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the throttle device section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class ThrottleDevice {
final private long major;
final private long minor;
@@ -1227,7 +1228,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the huge page limits section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class HugePageLimits {
final private String pageSize;
final private long limit;
@@ -1254,7 +1255,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the network section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Network {
final private int classID;
final private List<NetworkPriority> priorities;
@@ -1280,7 +1281,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the network priority section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class NetworkPriority {
final private String name;
final private int priority;
@@ -1308,7 +1309,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the pid section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class PID {
final private long limit;
@@ -1329,7 +1330,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the rdma section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class RDMA {
final private int hcaHandles;
final private int hcaObjects;
@@ -1357,7 +1358,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the intelrdt section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class IntelRdt {
final private String closID;
final private String l3CacheSchema;
@@ -1391,7 +1392,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the sysctl section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Sysctl {
// for kernel params
}
@@ -1400,7 +1401,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the seccomp section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Seccomp {
final private String defaultAction;
final private List<String> architectures;
@@ -1433,7 +1434,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the syscall section
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class Syscall {
final private List<String> names;
final private String action;
@@ -1466,7 +1467,7 @@ public class RuncContainerExecutorConfig {
* This class is a Java representation of the seccomp arguments
* of the OCI Runtime Specification.
*/
- @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+ @JsonInclude(JsonInclude.Include.NON_DEFAULT)
public static class SeccompArg {
final private int index;
final private long value;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
index 5147176..e7e6c65 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
@@ -57,7 +57,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.Reso
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
-import org.codehaus.jackson.map.ObjectMapper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java
index 73bfa02..3c2a951 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java
@@ -24,7 +24,8 @@ import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.ImageManifest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.ImageTagToManifestPlugin;
-import org.codehaus.jackson.map.ObjectMapper;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java
index 1e06d03..8a541bb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java
@@ -58,8 +58,9 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.Contai
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -501,7 +502,7 @@ public class TestRuncContainerRuntime {
JsonNode configNode = mapper.readTree(configFile);
RuncContainerExecutorConfig runcContainerExecutorConfig =
- mapper.readValue(configNode, RuncContainerExecutorConfig.class);
+ mapper.readValue(configNode.traverse(), RuncContainerExecutorConfig.class);
configSize = configNode.size();
OCIRuntimeConfig ociRuntimeConfig =
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
index 983504e..d8b5af4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceProfilesManagerImpl.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.resource;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -29,7 +30,6 @@ import org.apache.hadoop.yarn.exceptions.YARNFeatureNotEnabledException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
-import org.codehaus.jackson.map.ObjectMapper;
import java.io.File;
import java.io.IOException;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
index cf57085..5d44215 100755
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
@@ -23,11 +23,12 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.entity.TimelineEntityDocument;
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowactivity.FlowActivityDocument;
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowrun.FlowRunDocument;
-import org.codehaus.jackson.type.TypeReference;
import java.io.IOException;
import java.util.List;
+import com.fasterxml.jackson.core.type.TypeReference;
+
/**
* This is util class for baking sample TimelineEntities data for test.
*/
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java
index c1da4f6..a644bc1 100755
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/JsonUtils.java
@@ -18,12 +18,12 @@
package org.apache.hadoop.yarn.server.timelineservice.documentstore;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
-
import java.io.IOException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
/**
* A simple util class for Json SerDe.
*/
@@ -34,8 +34,7 @@ public final class JsonUtils {
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
static {
- OBJECT_MAPPER.configure(
- DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+ OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
}
/**
@@ -48,7 +47,7 @@ public final class JsonUtils {
* @throws IOException if Json String is not valid or error
* while deserialization
*/
- public static <T> T fromJson(final String jsonStr, final TypeReference type)
+ public static <T> T fromJson(final String jsonStr, final TypeReference<T> type)
throws IOException {
return OBJECT_MAPPER.readValue(jsonStr, type);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
index ef2dc70..4771029 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
@@ -176,6 +176,12 @@
</dependency>
<dependency>
+ <groupId>org.xerial.snappy</groupId>
+ <artifactId>snappy-java</artifactId>
+ <scope>runtime</scope>
+ </dependency>
+
+ <dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-client</artifactId>
<scope>test</scope>
diff --git a/pom.xml b/pom.xml
index aaa4203..19d2013 100644
--- a/pom.xml
+++ b/pom.xml
@@ -193,6 +193,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<bannedImport>org.glassfish.grizzly.**</bannedImport>
</bannedImports>
</restrictImports>
+ <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+ <includeTestCode>true</includeTestCode>
+ <reason>Use Fasterxml Jackson 2 dependency in place of org.codehaus Jackson 1</reason>
+ <bannedImports>
+ <bannedImport>org.codehaus.jackson.**</bannedImport>
+ <bannedImport>static org.codehaus.jackson.**</bannedImport>
+ </bannedImports>
+ </restrictImports>
</rules>
</configuration>
</execution>