<?xml version="1.0"?>
<project name="hadoop-hdfs" default="published"
         xmlns:ivy="antlib:org.apache.ivy.ant">
  <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements. See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License. You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
  -->

  <description>
    This is a build file to publish Hadoop hdfs as ivy and maven artifacts.
    It currently works alongside the original build.xml file, and exists
    purely to hook up hadoop into the SmartFrog test/release process.
  </description>

  <!-- Override point: allow overridden properties to be loaded -->
  <property file="build.properties"/>
  <property file="../build.properties"/>
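
  <!-- hadoop.version is referenced throughout this file but never set here;
       it is expected to come from one of the property files above or from
       ivy/libraries.properties (loaded below), e.g. a line such as
       hadoop.version=X.Y.Z -->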

  <target name="ivy-init-properties">
    <property name="ivy.dir" location="ivy"/>
    <loadproperties srcfile="${ivy.dir}/libraries.properties"/>
    <property name="ivysettings.xml" location="${ivy.dir}/ivysettings.xml"/>
    <property name="ivy.jar" location="${ivy.dir}/ivy-${ivy.version}.jar"/>
    <property name="ivy.org" value="org.apache.hadoop"/>

    <property name="build.dir" location="build"/>
    <property name="build.ivy.dir" location="${build.dir}/ivy"/>
    <property name="build.ivy.lib.dir" location="${build.ivy.dir}/lib"/>
    <property name="build.ivy.report.dir" location="${build.ivy.dir}/report"/>
    <property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven"/>
    <property name="module" value="hdfs"/>
    <property name="build.ivy.maven.pom"
              location="${build.ivy.maven.dir}/hadoop-${module}-${hadoop.version}.pom"/>
    <property name="build.ivy.maven.jar"
              location="${build.ivy.maven.dir}/hadoop-${module}-${hadoop.version}.jar"/>

    <!-- this is the naming policy for artifacts we want pulled down -->
    <property name="ivy.artifact.retrieve.pattern"
              value="[conf]/[artifact]-[revision].[ext]"/>
    <!-- this is how artifacts that get built are named -->
    <property name="ivy.publish.pattern"
              value="hadoop-[revision]-core.[ext]"/>
    <property name="hadoop.jar"
              location="${build.dir}/hadoop-${hadoop.version}-${module}.jar"/>

    <!-- preset to delegate to the main build file; puts us in control of version naming -->
    <presetdef name="delegate">
      <ant antfile="build.xml" inheritall="false" inheritrefs="false">
        <property name="version" value="${hadoop.version}"/>
      </ant>
    </presetdef>
    <!-- preset to delegate to the main build file via <subant>; puts us in control of version naming -->
    <presetdef name="delegate2">
      <subant antfile="build.xml" buildpath="." inheritall="false" inheritrefs="false">
        <property name="version" value="${hadoop.version}"/>
      </subant>
    </presetdef>

    <!-- preset to copy with ant property expansion (and always overwrite) -->
    <presetdef name="expandingcopy">
      <copy overwrite="true">
        <filterchain>
          <expandproperties/>
        </filterchain>
      </copy>
    </presetdef>
  </target>


  <target name="ivy-init-dirs" depends="ivy-init-properties">
    <mkdir dir="${build.ivy.dir}"/>
    <mkdir dir="${build.ivy.lib.dir}"/>
    <mkdir dir="${build.ivy.report.dir}"/>
    <mkdir dir="${build.ivy.maven.dir}"/>
  </target>


  <target name="clean" depends="ivy-init-properties"
          description="Clean the output directories">
    <delegate target="clean"/>
  </target>


  <target name="jar" depends="ivy-init-dirs"
          description="build the JAR">
    <delegate target="jar"/>
  </target>

  <!--
    This looks for Ivy on the classpath, and is used to skip reloading it if found.
    It probes for the antlib's cleancache task, which this build uses as its
    marker for Ivy 2.0 or later.
  -->
  <target name="ivy-probe-antlib">
    <condition property="ivy.found">
      <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
    </condition>
  </target>


  <!--
    To avoid Ivy leaking across big projects, always load it in the same classloader.
    Loading is skipped when the antlib is already present; the <fail> check below
    makes sure the load actually worked.
  -->
  <target name="ivy-init-antlib"
          depends="ivy-init-properties,ivy-init-dirs,ivy-probe-antlib"
          unless="ivy.found">

    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail"
             loaderRef="ivyLoader">
      <classpath>
        <pathelement location="${ivy.jar}"/>
      </classpath>
    </typedef>
    <fail>
      <condition>
        <not>
          <typefound uri="antlib:org.apache.ivy.ant" name="cleancache"/>
        </not>
      </condition>
      You need Apache Ivy 2.0 or later from http://ant.apache.org/
      It could not be loaded from ${ivy.jar}
    </fail>
  </target>

  <target name="ivy-init" depends="ivy-init-antlib">

    <!-- Configure Ivy by reading in the settings file.
         If a settings file has already been read into this settings ID, that one takes priority. -->
    <ivy:configure settingsId="hadoop.ivy.settings" file="${ivysettings.xml}" override="false"/>

  </target>

  <target name="ivy-resolve" depends="ivy-init">
    <ivy:resolve settingsRef="hadoop.ivy.settings"/>
  </target>

  <target name="ivy-retrieve" depends="ivy-resolve"
          description="Retrieve all Ivy-managed artifacts for the different configurations">
    <ivy:retrieve settingsRef="hadoop.ivy.settings"
                  pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" sync="true"/>
  </target>

  <target name="ivy-report" depends="ivy-resolve"
          description="Generate the Ivy dependency reports">
    <ivy:report todir="${build.ivy.report.dir}" settingsRef="hadoop.ivy.settings"/>
    <echo>
      Reports generated:
      ${build.ivy.report.dir}
    </echo>
  </target>

  <target name="assert-hadoop-jar-exists" depends="ivy-init">
    <fail>
      <condition>
        <not>
          <available file="${hadoop.jar}"/>
        </not>
      </condition>
      Not found: ${hadoop.jar}
      Please run the target "jar" in the main build file
    </fail>
  </target>

  <target name="ready-to-publish" depends="jar,assert-hadoop-jar-exists,ivy-resolve"/>

  <target name="ivy-publish-local" depends="ready-to-publish">
    <ivy:publish
        settingsRef="hadoop.ivy.settings"
        resolver="local"
        pubrevision="${hadoop.version}"
        overwrite="true"
        artifactspattern="${build.dir}/${ivy.publish.pattern}"/>
  </target>

  <!-- This is here out of curiosity, to see how well the makepom task works.
       Answer: it depends on whether you want transitive dependencies excluded or not. -->
  <target name="makepom" depends="ivy-resolve">
    <ivy:makepom settingsRef="hadoop.ivy.settings"
                 ivyfile="ivy.xml"
                 pomfile="${build.ivy.maven.dir}/generated.pom">
      <ivy:mapping conf="default" scope="default"/>
      <ivy:mapping conf="master" scope="master"/>
      <ivy:mapping conf="runtime" scope="runtime"/>
    </ivy:makepom>
  </target>


  <target name="copy-jar-to-maven" depends="ready-to-publish">
    <copy file="${hadoop.jar}"
          tofile="${build.ivy.maven.jar}"/>
    <checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
  </target>

  <target name="copypom" depends="ivy-init-dirs">
    <expandingcopy file="ivy/hadoop-core.pom"
                   tofile="${build.ivy.maven.pom}"/>
    <checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
  </target>

  <target name="maven-artifacts" depends="copy-jar-to-maven,copypom"/>
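
  <!-- "published" is this build file's default target: it publishes the JAR to
       the local Ivy repository and generates the Maven POM and JAR artifacts
       under ${build.ivy.maven.dir}. -->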
  <target name="published" depends="ivy-publish-local,maven-artifacts"/>

  <target name="ready-to-test" depends="ivy-init-dirs">
    <property name="test.data.dir" location="${build.dir}/test/data"/>
    <property name="test.reports.dir" location="${build.dir}/test/reports"/>
    <mkdir dir="${test.data.dir}"/>
    <mkdir dir="${test.reports.dir}"/>
  </target>

  <target name="testjob.jar" depends="ready-to-test">
    <delegate2 target="jar-test"
               failonerror="true"/>
  </target>


  <target name="junit" depends="ready-to-test,testjob.jar"
          description="run the junit tests and generate an XML report">
    <delegate2 target="test-core"
               failonerror="false">
      <property name="test.junit.output.format" value="xml"/>
      <property name="test.build.dir" value="${test.data.dir}"/>
    </delegate2>
  </target>

  <!-- Generate a JUnit report.
       Tip: you can run this while junit is still going on. -->
  <target name="junitreport" depends="ready-to-test">
    <junitreport todir="${test.reports.dir}">
      <fileset dir="${test.data.dir}">
        <include name="TEST-*.xml"/>
      </fileset>
      <report format="frames" todir="${test.reports.dir}"/>
    </junitreport>
    <echo>reports in ${test.reports.dir}/index.html</echo>
  </target>

  <target name="tested" depends="junit,junitreport"/>
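
  <!-- SVN helpers: presets and properties for working on the HDFS-326 branch.
       The targets below merge from trunk, diff the working copy and the branch,
       mark conflicts resolved, and write patch files into ${patches.dir}. -->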
  <target name="svn-init">
    <presetdef name="svn">
      <exec executable="svn" failonerror="true"/>
    </presetdef>
    <property name="issue" value="HDFS-326"/>
    <property name="hadoop-svn"
              value="https://svn.apache.org/repos/asf/hadoop/common"/>
    <property name="trunk"
              value="${hadoop-svn}/trunk"/>
    <property name="branch"
              value="${hadoop-svn}/branches/${issue}"/>
    <property name="patches.dir" location="../outgoing"/>
    <mkdir dir="${patches.dir}"/>
    <property name="patch.version" value="1"/>
    <property name="patch.file"
              location="${patches.dir}/${issue}-${patch.version}.patch"/>
  </target>

  <target name="svn-merge" depends="svn-init"
          description="merge in the trunk">
    <svn>
      <arg value="merge"/>
      <arg value="${trunk}"/>
      <arg value="--accept"/>
      <arg value="postpone"/>
    </svn>
  </target>

  <target name="svn-diff" depends="svn-init"
          description="diff the local code against the branch">
    <svn>
      <arg value="diff"/>
    </svn>
  </target>

  <target name="svn-resolved" depends="svn-init"
          description="mark the tree as resolved">
    <svn>
      <arg value="resolve"/>
    </svn>
  </target>

  <!--
    svn diff \
      https://svn.apache.org/repos/asf/hadoop/core/trunk \
      https://svn.apache.org/repos/asf/hadoop/core/branches/HADOOP-3628-2
  -->
  <target name="svn-diff-trunk" depends="svn-init"
          description="diff the branch against trunk">
    <svn>
      <arg value="diff"/>
      <arg value="${trunk}"/>
      <arg value="${branch}"/>
    </svn>
  </target>


  <target name="svn-create-changelist" depends="svn-init"
          description="Create a changelist of everything we want in the big patch">
    <property name="hdfs/server"
              value="src/java/org/apache/hadoop/hdfs/server"/>
    <property name="test/hdfs"
              value="src/test/hdfs/org/apache/hadoop/hdfs"/>
    <svn>
      <arg value="changelist"/>
      <arg value="${issue}"/>

      <arg value="${hdfs/server}/datanode/DataNode.java"/>
      <arg value="${hdfs/server}/datanode/FSDataset.java"/>
      <arg value="${hdfs/server}/namenode/BackupNode.java"/>
      <arg value="${hdfs/server}/namenode/FSNamesystem.java"/>
      <arg value="${hdfs/server}/namenode/NameNode.java"/>
      <arg value="${hdfs/server}/namenode/PendingReplicationBlocks.java"/>
      <arg value="${test/hdfs}/server/namenode/TestReplicationPolicy.java"/>
    </svn>
  </target>


  <!-- Diff the src/ trees of trunk and the branch, restricted to the ${issue}
       changelist, and write the result to ${patch.file}. -->
  <target name="svn-diff-src" depends="svn-init"
          description="diff the src trees of trunk and the branch into the patch file">
    <echo>Writing to ${patch.file}</echo>
    <svn output="${patch.file}">
      <arg value="diff"/>
      <arg value="${trunk}/src"/>
      <arg value="${branch}/src"/>
      <arg value="--changelist"/>
      <arg value="${issue}"/>
    </svn>
  </target>

</project>