diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
index b2d2a8d100ff1..bf66bd06fd23d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
@@ -33,7 +33,7 @@ public class TestAvroSerialization {
   @Test
   public void testSpecific() throws Exception {
     AvroRecord before = new AvroRecord();
-    before.intField = 5;
+    before.put("intField", 5);
     AvroRecord after = SerializationTestUtil.testSerialization(conf, before);
     assertEquals(before, after);
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
index 66f378123986d..08ba27fe91895 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
@@ -31,8 +31,8 @@ public class JobQueueChangeEvent implements HistoryEvent {
   private JobQueueChange datum = new JobQueueChange();

   public JobQueueChangeEvent(JobID id, String queueName) {
-    datum.jobid = new Utf8(id.toString());
-    datum.jobQueueName = new Utf8(queueName);
+    datum.setJobid(id.toString());
+    datum.setJobQueueName(queueName);
   }

   JobQueueChangeEvent() { }
@@ -54,13 +54,14 @@ public void setDatum(Object datum) {

   /** Get the Job ID */
   public JobID getJobId() {
-    return JobID.forName(datum.jobid.toString());
+    return JobID.forName(datum.getJobid().toString());
   }

   /** Get the new Job queue name */
   public String getJobQueueName() {
-    if (datum.jobQueueName != null) {
-      return datum.jobQueueName.toString();
+    java.lang.CharSequence jobQueueName = datum.getJobQueueName();
+    if (jobQueueName != null) {
+      return jobQueueName.toString();
     }
     return null;
   }
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 4e0b7475cc51d..d7fa2dafabd93 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -63,7 +63,7 @@
     <java.security.egd>file:///dev/urandom</java.security.egd>

-    <avro.version>1.9.2</avro.version>
+    <avro.version>1.11.4</avro.version>

    <jersey.version>1.19.4</jersey.version>
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index 1213e6a46f449..603b248f6e848 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -460,7 +460,7 @@ private void processTaskFinishedEvent(TaskFinishedEvent event) {
     }
     task.setFinishTime(event.getFinishTime());
     task.setTaskStatus(getPre21Value(event.getTaskStatus()));
-    task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
+    task.incorporateCounters(((TaskFinished) event.getDatum()).getCounters());
   }

   private void processTaskFailedEvent(TaskFailedEvent event) {
@@ -472,7 +472,7 @@ private void processTaskFailedEvent(TaskFailedEvent event) {
     task.setFinishTime(event.getFinishTime());
     task.setTaskStatus(getPre21Value(event.getTaskStatus()));
     TaskFailed t = (TaskFailed)(event.getDatum());
-    task.putDiagnosticInfo(t.error.toString());
+    task.putDiagnosticInfo(t.getError().toString());
     // killed task wouldn't have failed attempt.
     if (t.getFailedDueToAttempt() != null) {
       task.putFailedDueToAttemptId(t.getFailedDueToAttempt().toString());
@@ -542,7 +542,7 @@ private void processTaskAttemptFinishedEvent(TaskAttemptFinishedEvent event) {
     }
     attempt.setFinishTime(event.getFinishTime());
     attempt
-        .incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
+        .incorporateCounters(((TaskAttemptFinished) event.getDatum()).getCounters());
   }

   private void processReduceAttemptFinishedEvent(
@@ -568,7 +568,7 @@ private void processReduceAttemptFinishedEvent(
     attempt.setShuffleFinished(event.getShuffleFinishTime());
     attempt.setSortFinished(event.getSortFinishTime());
     attempt
-        .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
+        .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).getCounters());
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -596,7 +596,7 @@ private void processMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
     // is redundant, but making this will add future-proofing.
     attempt.setFinishTime(event.getFinishTime());
     attempt
-        .incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
+        .incorporateCounters(((MapAttemptFinished) event.getDatum()).getCounters());
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -661,11 +661,11 @@ private void processJobFinishedEvent(JobFinishedEvent event) {

     JobFinished job = (JobFinished)event.getDatum();
     Map<String, Long> countersMap =
-        JobHistoryUtils.extractCounters(job.totalCounters);
+        JobHistoryUtils.extractCounters(job.getTotalCounters());
     result.putTotalCounters(countersMap);
-    countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
+    countersMap = JobHistoryUtils.extractCounters(job.getMapCounters());
     result.putMapCounters(countersMap);
-    countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
+    countersMap = JobHistoryUtils.extractCounters(job.getReduceCounters());
     result.putReduceCounters(countersMap);
   }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
index 6ae87bbd40a11..34ef95f337858 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
@@ -157,9 +157,9 @@ static boolean isJobConfXml(String fileName) {
   static Map<String, Long> extractCounters(JhCounters counters) {
     Map<String, Long> countersMap = new HashMap<String, Long>();
     if (counters != null) {
-      for (JhCounterGroup group : counters.groups) {
-        for (JhCounter counter : group.counts) {
-          countersMap.put(counter.name.toString(), counter.value);
+      for (JhCounterGroup group : counters.getGroups()) {
+        for (JhCounter counter : group.getCounts()) {
+          countersMap.put(counter.getName().toString(), counter.getValue());
         }
       }
     }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 4ae33a76617fb..2308e586900bb 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -268,11 +268,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
       String counterName) {
     counterName = canonicalizeCounterName(counterName);
-    for (JhCounterGroup group : counters.groups) {
-      for (JhCounter counter : group.counts) {
+    for (JhCounterGroup group : counters.getGroups()) {
+      for (JhCounter counter : group.getCounts()) {
         if (counterName
-            .equals(canonicalizeCounterName(counter.name.toString()))) {
-          thunk.set(counter.value);
+            .equals(canonicalizeCounterName(counter.getName().toString()))) {
+          thunk.set(counter.getValue());
           return;
         }
       }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index 5c6abd372c081..c4ca962b6e69f 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -636,11 +636,11 @@ private static void incorporateCounter(SetField thunk, JhCounters counters,
       String counterName) {
     counterName = canonicalizeCounterName(counterName);
-    for (JhCounterGroup group : counters.groups) {
-      for (JhCounter counter : group.counts) {
+    for (JhCounterGroup group : counters.getGroups()) {
+      for (JhCounter counter : group.getCounts()) {
         if (counterName
-            .equals(canonicalizeCounterName(counter.name.toString()))) {
-          thunk.set(counter.value);
+            .equals(canonicalizeCounterName(counter.getName().toString()))) {
+          thunk.set(counter.getValue());
           return;
         }
       }
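
Every hunk above applies the same mechanical migration: reads of the formerly public fields on Avro-generated records (counters.groups, counter.name, datum.jobid, ...) become getter calls, and writes go through setters (or the generic put() view in the test), since newer avro-maven-plugin releases generate private fields by default. The sketch below illustrates that accessor style using the JobQueueChange record touched in this patch; the package and the generated method signatures are inferred from the calls in the diff, not taken from generated sources.

// Illustrative sketch only, based on the accessor calls used in this patch.
// Assumes JobQueueChange is the Avro-generated record in the
// org.apache.hadoop.mapreduce.jobhistory package.
import org.apache.hadoop.mapreduce.jobhistory.JobQueueChange;

public class AvroAccessorSketch {
  public static void main(String[] args) {
    JobQueueChange datum = new JobQueueChange();

    // Old generated code allowed direct field writes, e.g.
    //   datum.jobQueueName = new Utf8("default");
    // Newer generated code keeps fields private; the setters take
    // CharSequence, so a plain String no longer needs a Utf8 wrapper.
    datum.setJobid("job_1234567890123_0001");
    datum.setJobQueueName("default");

    // Getters return CharSequence rather than String, hence the explicit
    // null check and toString() conversion seen in JobQueueChangeEvent.
    CharSequence queue = datum.getJobQueueName();
    System.out.println(queue == null ? null : queue.toString());
  }
}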