Class org.springframework.data.hadoop.mapreduce.JarTests

11 tests

0 failures

2.374s duration

100% successful

Tests

Test | Duration | Result
testBadMainClassArgs | 0.137s | passed
testBadMainClassConfiguration | 0.152s | passed
testBadMainClassLoaded | 0.213s | passed
testBadMainClassPreExit | 0.149s | passed
testClassVisibility | 0.153s | passed
testExitTrap | 0.146s | passed
testOtherMainClassArgs | 0.142s | passed
testOtherMainClassConfiguration | 0.148s | passed
testOtherMainClassLoaded | 0.131s | passed
testTasklet | 0.658s | passed
testTaskletScope | 0.345s | passed

Standard output

07:10:24,919  INFO Test worker context.TestContextManager - @TestExecutionListeners is not present for class [class org.springframework.data.hadoop.mapreduce.JarTests]: using defaults.
07:10:24,924  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:24,958  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:24,987  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:25,008  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:25,009  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@77164bf6: startup date [Thu Sep 05 07:10:25 PDT 2013]; root of context hierarchy
07:10:25,025  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:25,035  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@5b013dc8: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:25,112  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1960869908
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-e27bd54d-2839-4510-9621-b78faaf13631
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:25,168  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@285056295
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-c101431f-6f48-474b-9639-81627064fdc7
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:25,220  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:25,316  INFO Test worker support.SimpleJobLauncher - Job: [FlowJob: [name=mainJob]] launched with the following parameters: [{}]
07:10:25,328  INFO Test worker job.SimpleStepHandler - Executing step: [cleanup]
07:10:25,356  INFO Test worker mapreduce.JarTasklet - Invoking [class org.apache.hadoop.examples.ExampleDriver] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/mini-hadoop-examples.jar] with args [[wordcount, /ide-test/input/word, /ide-test/output/wc]]
07:10:25,495  INFO Test worker mapred.JobClient - Cleaning up the staging area file:/tmp/hadoop-bamboo/mapred/staging/bamboo1411872022/.staging/job_local1411872022_0001
07:10:25,495 ERROR Test worker security.UserGroupInformation - PriviledgedActionException as:bamboo cause:org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: file:/ide-test/input/word
07:10:25,539  INFO Test worker job.SimpleStepHandler - Executing step: [wordcount]
07:10:25,575  INFO Test worker support.SimpleJobLauncher - Job: [FlowJob: [name=mainJob]] completed with the following parameters: [{}] and the following status: [COMPLETED]
07:10:25,575  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@77164bf6: startup date [Thu Sep 05 07:10:25 PDT 2013]; root of context hierarchy
07:10:25,576  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@5b013dc8: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:25,583  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:25,619  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:25,647  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:25,662  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:25,663  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@2d97c1b1: startup date [Thu Sep 05 07:10:25 PDT 2013]; root of context hierarchy
07:10:25,674  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:25,684  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@46e337b2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:25,750  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@624528707
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-155a4a6b-ad8d-453f-8de5-3f4f43af3ec8
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:25,802  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@803645472
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-936ee3f4-6d26-4695-a301-83dc0f8e3e7e
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:25,852  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:25,922  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@2d97c1b1: startup date [Thu Sep 05 07:10:25 PDT 2013]; root of context hierarchy
07:10:25,923  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@46e337b2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:25,927  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:25,963  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:25,987  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,001  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,001  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@23493578: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,013  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,022  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@698c10b8: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,063  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@240685921
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-0357b501-1b39-4743-8e02-0ce88c91e281
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,086  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@356678707
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-42bb7e8e-cb90-4d77-8d57-6f830cdaf535
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,107  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,138  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@23493578: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,138  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@698c10b8: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,140  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,154  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,165  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,171  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,171  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@7d582674: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,176  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,180  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@91fa7e2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,208  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1135846891
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-13797788-fc45-4d96-ad65-1e4af916bffc
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,232  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@703663157
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-b1c52d01-dd70-454d-bba4-4a8ff3ac822d
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,253  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,291  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@7d582674: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,291  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@91fa7e2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,293  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,307  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,318  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,324  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,324  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@74247cc2: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,330  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,334  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@4a8f5f75: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,362  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@734383365
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-e7cfac80-92dd-45b0-affd-ddc6a5dc7d1c
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,386  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@681477373
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-10afc95d-cf36-4453-b117-78dfc646b1d9
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,412  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,444  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@74247cc2: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,444  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@4a8f5f75: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,446  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,459  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,469  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,475  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,475  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@6033fec0: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,480  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,484  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@47a34181: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,510  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1461651289
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-9918586c-9e54-41cd-86f6-7542d60dac09
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,531  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@620106128
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-f885f4b5-84ca-48dd-b8ba-5aa5d4851573
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,551  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,581  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@6033fec0: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,582  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@47a34181: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,583  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,597  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,612  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,619  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,620  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@3a8c5214: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,625  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,630  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@2d73b676: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,657  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@470905757
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-50dadace-d4d2-420e-a46b-2ef7eb4398ce
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,682  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1706393036
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-12db6bee-1c06-4548-8882-b3e036096914
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,702  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,731  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@3a8c5214: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,731  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@2d73b676: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,732  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,746  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,756  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,763  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,763  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@60ce717a: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,769  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,774  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@349955ab: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,802  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@683612021
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-bf7b0d42-0442-4362-8569-be70b03ccb68
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,823  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1595335682
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-cc9b6f65-3cac-40bb-8400-c7c706bae0d5
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,848  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:26,878  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@60ce717a: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,878  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@349955ab: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,879  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:26,892  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:26,903  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:26,909  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:26,909  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@2d2c6ed8: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:26,914  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:26,917  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@340e92fe: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:26,943  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1040850463
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-e3ad4cd6-90ec-47ef-bc70-d54a1c888c4b
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,963  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1919217432
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-940cb17c-dd1c-464d-9f3c-4ad76a89149d
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:26,982  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:27,009  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@2d2c6ed8: startup date [Thu Sep 05 07:10:26 PDT 2013]; root of context hierarchy
07:10:27,010  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@340e92fe: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:27,011  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:27,024  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:27,034  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:27,040  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:27,040  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@2e0fadfa: startup date [Thu Sep 05 07:10:27 PDT 2013]; root of context hierarchy
07:10:27,045  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:27,049  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@65fbadc2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:27,075  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@285416728
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-2d449a15-4a1f-44bb-b21b-afc4f6725847
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:27,099  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@1234890038
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-671e26bf-f375-42bd-bbe2-2d5121bd823e
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:27,119  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:27,157  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@2e0fadfa: startup date [Thu Sep 05 07:10:27 PDT 2013]; root of context hierarchy
07:10:27,157  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@65fbadc2: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:27,159  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/mapreduce/JarTests-context.xml]
07:10:27,173  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/hadoop-ctx.xml]
07:10:27,184  INFO Test worker xml.XmlBeanDefinitionReader - Loading XML bean definitions from class path resource [org/springframework/data/hadoop/batch-common.xml]
07:10:27,191  INFO Test worker support.DefaultListableBeanFactory - Overriding bean definition for bean 'mainJob': replacing [Generic bean: class [org.springframework.batch.core.configuration.xml.SimpleFlowFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null] with [Generic bean: class [org.springframework.batch.core.configuration.xml.JobParserJobFactoryBean]; scope=; abstract=false; lazyInit=false; autowireMode=0; dependencyCheck=0; autowireCandidate=true; primary=false; factoryBeanName=null; factoryMethodName=null; initMethodName=null; destroyMethodName=null]
07:10:27,191  INFO Test worker support.GenericApplicationContext - Refreshing org.springframework.context.support.GenericApplicationContext@55433fd1: startup date [Thu Sep 05 07:10:27 PDT 2013]; root of context hierarchy
07:10:27,196  INFO Test worker config.PropertyPlaceholderConfigurer - Loading properties file from class path resource [test.properties]
07:10:27,201  INFO Test worker support.DefaultListableBeanFactory - Pre-instantiating singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@53aa4bb4: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
07:10:27,228  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@124086011
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-4f874adb-e8f1-4ef4-a789-1ac90dfa321f
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:27,250  INFO Test worker mapreduce.JarRunner - Invoking [class test.MainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[bad]]
*** New Config is ***Config@722820328
Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml, Custom-cfg-for- class path resource [some-jar.jar]-6fc57cb6-bcdd-4092-b29f-0c9e28424df0
{fs.s3n.impl=org.apache.hadoop.fs.s3native.NativeS3FileSystem, mapreduce.job.counters.max=120, mapred.task.cache.levels=2, mapreduce.job.restart.recover=true, dfs.client.use.datanode.hostname=false, map.sort.class=org.apache.hadoop.util.QuickSort, hadoop.tmp.dir=/tmp/hadoop-${user.name}, hadoop.native.lib=true, dfs.namenode.decommission.nodes.per.interval=5, dfs.https.need.client.auth=false, dfs.datanode.drop.cache.behind.writes=false, ipc.client.idlethreshold=4000, mapred.system.dir=${hadoop.tmp.dir}/mapred/system, dfs.datanode.data.dir.perm=755, cfg=main, mapred.job.tracker.persist.jobstatus.hours=0, dfs.namenode.logging.level=info, dfs.datanode.address=0.0.0.0:50010, dfs.block.access.token.enable=false, io.skip.checksum.errors=false, fs.default.name=hdfs://w1-kodiak-hd023:8020, mapred.cluster.reduce.memory.mb=-1, mapred.child.tmp=./tmp, fs.har.impl.disable.cache=true, dfs.safemode.threshold.pct=0.999f, mapred.skip.reduce.max.skip.groups=0, dfs.namenode.handler.count=10, mapred.heartbeats.in.second=100, dfs.blockreport.initialDelay=0, mapred.tasktracker.dns.nameserver=default, io.sort.factor=10, dfs.datanode.drop.cache.behind.reads=false, mapred.task.timeout=600000, mapred.max.tracker.failures=4, hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory, mapred.job.tracker.jobhistory.lru.cache.size=5, fs.hdfs.impl=org.apache.hadoop.hdfs.DistributedFileSystem, mapred.skip.map.auto.incr.proc.count=true, dfs.namenode.stale.datanode.interval=30000, dfs.block.access.key.update.interval=600, mapreduce.job.complete.cancel.delegation.tokens=true, io.mapfile.bloom.size=1048576, mapreduce.reduce.shuffle.connect.timeout=180000, dfs.safemode.extension=30000, mapred.jobtracker.blacklist.fault-timeout-window=180, dfs.namenode.write.stale.datanode.ratio=0.5f, hadoop.skip.worker.version.check=false, tasktracker.http.threads=40, net.topology.impl=org.apache.hadoop.net.NetworkTopology, mapred.job.shuffle.merge.percent=0.66, 
fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem, io.bytes.per.checksum=512, dfs.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, mapred.output.compress=false, mapred.combine.recordsBeforeProgress=10000, mapred.healthChecker.script.timeout=600000, topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping, dfs.https.server.keystore.resource=ssl-server.xml, mapred.reduce.slowstart.completed.maps=0.05, mapred.reduce.max.attempts=4, dfs.namenode.safemode.min.datanodes=0, mapreduce.ifile.readahead.bytes=4194304, fs.ramfs.impl=org.apache.hadoop.fs.InMemoryFileSystem, dfs.block.access.token.lifetime=600, dfs.name.edits.dir=${dfs.name.dir}, mapred.skip.map.max.skip.records=0, mapred.cluster.map.memory.mb=-1, hadoop.security.group.mapping=org.apache.hadoop.security.ShellBasedUnixGroupsMapping, mapred.job.tracker.persist.jobstatus.dir=/jobtracker/jobsInfo, mapred.jar=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, fs.s3.buffer.dir=${hadoop.tmp.dir}/s3, dfs.block.size=67108864, job.end.retry.attempts=0, fs.file.impl=org.apache.hadoop.fs.LocalFileSystem, dfs.namenode.avoid.write.stale.datanode=false, dfs.datanode.max.xcievers=4096, mapred.local.dir.minspacestart=0, mapred.output.compression.type=RECORD, dfs.datanode.ipc.address=0.0.0.0:50020, dfs.permissions=true, topology.script.number.args=100, mapreduce.job.counters.groups.max=50, io.mapfile.bloom.error.rate=0.005, mapred.cluster.max.reduce.memory.mb=-1, mapred.max.tracker.blacklists=4, mapred.task.profile.maps=0-2, mapred.jobtracker.jobSchedulable=org.apache.hadoop.mapred.JobSchedulable, dfs.datanode.https.address=0.0.0.0:50475, mapred.userlog.retain.hours=24, dfs.secondary.http.address=0.0.0.0:50090, dfs.namenode.replication.work.multiplier.per.iteration=2, dfs.replication.max=512, mapred.job.tracker.persist.jobstatus.active=false, hadoop.security.authorization=false, local.cache.size=10737418240, 
dfs.namenode.delegation.token.renew-interval=86400000, mapred.min.split.size=0, mapred.map.tasks=2, mapred.child.java.opts=-Xmx200m, dfs.https.client.keystore.resource=ssl-client.xml, mapred.job.queue.name=default, mapred.job.tracker.retiredjobs.cache.size=1000, dfs.https.address=0.0.0.0:50470, dfs.balance.bandwidthPerSec=1048576, ipc.server.listen.queue.size=128, dfs.namenode.invalidate.work.pct.per.iteration=0.32f, mapred.inmem.merge.threshold=1000, job.end.retry.interval=30000, mapreduce.tasktracker.outofband.heartbeat.damper=1000000, hadoop.security.use-weak-http-crypto=false, mapred.skip.attempts.to.start.skipping=2, fs.checkpoint.dir=${hadoop.tmp.dir}/dfs/namesecondary, dfs.namenode.avoid.read.stale.datanode=false, mapred.reduce.tasks=1, mapred.merge.recordsBeforeProgress=10000, mapred.userlog.limit.kb=0, mapred.job.reduce.memory.mb=-1, webinterface.private.actions=false, dfs.max.objects=0, hadoop.security.token.service.use_ip=true, mapred.job.shuffle.input.buffer.percent=0.70, io.sort.spill.percent=0.80, dfs.datanode.dns.nameserver=default, mapred.map.tasks.speculative.execution=true, hadoop.http.authentication.type=simple, hadoop.util.hash.type=murmur, hadoop.security.instrumentation.requires.admin=false, dfs.blockreport.intervalMsec=3600000, dfs.datanode.readahead.bytes=4193404, mapred.map.max.attempts=4, mapreduce.job.acl-view-job= , mapreduce.ifile.readahead=true, mapred.jobtracker.nodegroup.aware=false, dfs.client.block.write.retries=3, mapred.job.tracker.handler.count=10, mapreduce.reduce.shuffle.read.timeout=180000, mapred.tasktracker.expiry.interval=600000, dfs.secondary.namenode.kerberos.internal.spnego.principal=${dfs.web.authentication.kerberos.principal}, dfs.https.enable=false, mapred.jobtracker.maxtasks.per.job=-1, mapred.jobtracker.job.history.block.size=3145728, keep.failed.task.files=false, dfs.datanode.use.datanode.hostname=false, dfs.datanode.failed.volumes.tolerated=0, ipc.client.tcpnodelay=false, mapred.task.profile.reduces=0-2, 
io.map.index.skip=0, mapred.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, hadoop.http.authentication.token.validity=36000, ipc.server.tcpnodelay=false, hadoop.jetty.logs.serve.aliases=true, mapred.jobtracker.blacklist.fault-bucket-width=15, dfs.namenode.delegation.key.update-interval=86400000, mapred.used.genericoptionsparser=true, mapred.job.map.memory.mb=-1, dfs.default.chunk.view.size=32768, hadoop.logfile.size=10000000, mapred.reduce.tasks.speculative.execution=true, mapreduce.tasktracker.outofband.heartbeat=false, mapreduce.reduce.input.limit=-1, dfs.datanode.du.reserved=0, dfs.datanode.sync.behind.writes=false, hadoop.security.authentication=simple, fs.checkpoint.period=3600, mapred.job.reuse.jvm.num.tasks=1, dfs.web.ugi=webuser,webgroup, mapred.jobtracker.completeuserjobs.maximum=100, dfs.df.interval=60000, mapred.task.tracker.task-controller=org.apache.hadoop.mapred.DefaultTaskController, dfs.data.dir=${hadoop.tmp.dir}/dfs/data, fs.s3.maxRetries=4, dfs.datanode.dns.interface=default, mapred.cluster.max.map.memory.mb=-1, mapreduce.reduce.shuffle.maxfetchfailures=10, mapreduce.job.acl-modify-job= , dfs.permissions.supergroup=supergroup, mapred.local.dir=${hadoop.tmp.dir}/mapred/local, fs.hftp.impl=org.apache.hadoop.hdfs.HftpFileSystem, fs.s3.sleepTimeSeconds=10, fs.trash.interval=0, dfs.replication.min=1, mapred.submit.replication=10, fs.har.impl=org.apache.hadoop.fs.HarFileSystem, hadoop.relaxed.worker.version.check=false, mapred.map.output.compression.codec=org.apache.hadoop.io.compress.DefaultCodec, someparam=somevalue, mapred.tasktracker.dns.interface=default, dfs.http.address=0.0.0.0:50070, dfs.namenode.decommission.interval=30, mapred.job.tracker=w1-kodiak-hd023:51130, dfs.heartbeat.interval=3, hadoop.http.authentication.signature.secret.file=${user.home}/hadoop-http-auth-signature-secret, io.seqfile.sorter.recordlimit=1000000, dfs.name.dir=${hadoop.tmp.dir}/dfs/name, 
mapred.jobtracker.taskScheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler, mapred.line.input.format.linespermap=1, dfs.datanode.http.address=0.0.0.0:50075, fs.webhdfs.impl=org.apache.hadoop.hdfs.web.WebHdfsFileSystem, dfs.image.transfer.bandwidthPerSec=0, mapred.local.dir.minspacekill=0, dfs.replication.interval=3, io.sort.record.percent=0.05, hadoop.http.authentication.kerberos.principal=HTTP/localhost@LOCALHOST, fs.kfs.impl=org.apache.hadoop.fs.kfs.KosmosFileSystem, mapred.temp.dir=${hadoop.tmp.dir}/mapred/temp, mapred.tasktracker.reduce.tasks.maximum=2, dfs.replication=3, fs.checkpoint.edits.dir=${fs.checkpoint.dir}, mapreduce.jobhistory.max-age-ms=2592000000, mapred.tasktracker.tasks.sleeptime-before-sigkill=5000, mapred.job.reduce.input.buffer.percent=0.0, mapred.tasktracker.indexcache.mb=10, dfs.webhdfs.enabled=false, mapreduce.job.split.metainfo.maxsize=10000000, hadoop.logfile.count=10, mapred.skip.reduce.auto.incr.proc.count=true, tmpfiles=file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar, io.seqfile.compress.blocksize=1000000, fs.s3.block.size=67108864, mapred.tasktracker.taskmemorymanager.monitoring-interval=5000, hadoop.http.authentication.simple.anonymous.allowed=true, mapred.acls.enabled=false, mapred.queue.default.state=RUNNING, mapreduce.jobtracker.staging.root.dir=${hadoop.tmp.dir}/mapred/staging, mapred.queue.names=default, fs.hsftp.impl=org.apache.hadoop.hdfs.HsftpFileSystem, dfs.access.time.precision=3600000, mapred.task.tracker.http.address=0.0.0.0:50060, mapred.disk.healthChecker.interval=60000, mapred.reduce.parallel.copies=5, io.seqfile.lazydecompress=true, io.sort.mb=100, ipc.client.connection.maxidletime=10000, mapred.task.tracker.report.address=127.0.0.1:0, mapred.compress.map.output=false, hadoop.security.uid.cache.secs=14400, mapred.healthChecker.interval=60000, ipc.client.kill.max=10, ipc.client.connect.max.retries=10, fs.s3.impl=org.apache.hadoop.fs.s3.S3FileSystem, 
mapred.user.jobconf.limit=5242880, mapreduce.job.counters.group.name.max=128, mapred.job.tracker.http.address=0.0.0.0:50030, io.file.buffer.size=4096, mapred.jobtracker.restart.recover=false, io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, dfs.datanode.handler.count=3, mapred.task.profile=false, dfs.replication.considerLoad=true, mapreduce.jobhistory.cleaner.interval-ms=86400000, dfs.namenode.edits.toleration.length=0, jobclient.output.filter=FAILED, web=war, dfs.namenode.delegation.token.max-lifetime=604800000, hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab, mapred.tasktracker.map.tasks.maximum=2, mapreduce.job.counters.counter.name.max=64, io.compression.codecs=org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.SnappyCodec, fs.checkpoint.size=67108864}
*** Received args ***[bad]
07:10:27,272  INFO Test worker mapreduce.JarRunner - Invoking [class test.OtherMainClass] from jar [file:/data/bamboo-home/xml-data/build-dir/SPRINGDATAHADOOP-HDP13CENTOS0-JOB1/build/resources/test/some-jar.jar] with args [[42]]
07:10:27,300  INFO Test worker support.GenericApplicationContext - Closing org.springframework.context.support.GenericApplicationContext@55433fd1: startup date [Thu Sep 05 07:10:27 PDT 2013]; root of context hierarchy
07:10:27,300  INFO Test worker support.DefaultListableBeanFactory - Destroying singletons in org.springframework.beans.factory.support.DefaultListableBeanFactory@53aa4bb4: defining beans [ppc,hadoopFs,hadoopResourceLoader,hadoopConfiguration,cfg-init,fs-init,rl-init,org.springframework.data.hadoop.scripting.HdfsScriptRunner#0,jobRepository,transactionManager,jobLauncher,taskExecutor,bad-main-class,other-class,org.springframework.batch.core.scope.internalStepScope,org.springframework.beans.factory.config.CustomEditorConfigurer,org.springframework.batch.core.configuration.xml.CoreNamespacePostProcessor,tasklet-ns,cleanup,org.springframework.data.hadoop.scripting.HdfsScriptRunner#1,script-tasklet,wordcount,mainJob,org.springframework.context.annotation.internalConfigurationAnnotationProcessor,org.springframework.context.annotation.internalAutowiredAnnotationProcessor,org.springframework.context.annotation.internalRequiredAnnotationProcessor,org.springframework.context.annotation.internalCommonAnnotationProcessor]; root of factory hierarchy
Removing incorrect key [org.springframework.data.hadoop.jar.cfg] w/ value Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml
Removing incorrect key [org.springframework.data.hadoop.jar.other.args] w/ value [Ljava.lang.String;@5bbd46e4
Removing incorrect key [org.springframework.data.jar.exit.exception] w/ value org.springframework.data.hadoop.mapreduce.ExecutionUtils$ExitTrapped
Removing incorrect key [org.springframework.data.hadoop.jar.args] w/ value [Ljava.lang.String;@7abdb4f
Removing incorrect key [org.springframework.data.hadoop.jar.other.cfg] w/ value Configuration: core-default.xml, core-site.xml, mapred-default.xml, mapred-site.xml, hdfs-default.xml, hdfs-site.xml

Standard error

org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: file:/ide-test/input/word
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:235)
	at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:252)
	at org.apache.hadoop.mapred.JobClient.writeNewSplits(JobClient.java:1054)
	at org.apache.hadoop.mapred.JobClient.writeSplits(JobClient.java:1071)
	at org.apache.hadoop.mapred.JobClient.access$700(JobClient.java:179)
	at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:983)
	at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:936)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:416)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
	at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:936)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:550)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:580)
	at org.apache.hadoop.examples.WordCount.main(WordCount.java:67)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:616)
	at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:68)
	at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:139)
	at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:64)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:616)
	at org.springframework.util.ReflectionUtils.invokeMethod(ReflectionUtils.java:186)
	at org.springframework.data.hadoop.mapreduce.JarExecutor.invokeTargetObject(JarExecutor.java:71)
	at org.springframework.data.hadoop.mapreduce.HadoopCodeExecutor.invokeTarget(HadoopCodeExecutor.java:185)
	at org.springframework.data.hadoop.mapreduce.HadoopCodeExecutor.runCode(HadoopCodeExecutor.java:102)
	at org.springframework.data.hadoop.mapreduce.JarTasklet.execute(JarTasklet.java:32)
	at org.springframework.batch.core.step.tasklet.TaskletStep$ChunkTransactionCallback.doInTransaction(TaskletStep.java:386)
	at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:130)
	at org.springframework.batch.core.step.tasklet.TaskletStep$2.doInChunkContext(TaskletStep.java:264)
	at org.springframework.batch.core.scope.context.StepContextRepeatCallback.doInIteration(StepContextRepeatCallback.java:76)
	at org.springframework.batch.repeat.support.RepeatTemplate.getNextResult(RepeatTemplate.java:367)
	at org.springframework.batch.repeat.support.RepeatTemplate.executeInternal(RepeatTemplate.java:214)
	at org.springframework.batch.repeat.support.RepeatTemplate.iterate(RepeatTemplate.java:143)
	at org.springframework.batch.core.step.tasklet.TaskletStep.doExecute(TaskletStep.java:250)
	at org.springframework.batch.core.step.AbstractStep.execute(AbstractStep.java:195)
	at org.springframework.batch.core.job.SimpleStepHandler.handleStep(SimpleStepHandler.java:135)
	at org.springframework.batch.core.job.flow.JobFlowExecutor.executeStep(JobFlowExecutor.java:61)
	at org.springframework.batch.core.job.flow.support.state.StepState.handle(StepState.java:60)
	at org.springframework.batch.core.job.flow.support.SimpleFlow.resume(SimpleFlow.java:144)
	at org.springframework.batch.core.job.flow.support.SimpleFlow.start(SimpleFlow.java:124)
	at org.springframework.batch.core.job.flow.FlowJob.doExecute(FlowJob.java:135)
	at org.springframework.batch.core.job.AbstractJob.execute(AbstractJob.java:293)
	at org.springframework.batch.core.launch.support.SimpleJobLauncher$1.run(SimpleJobLauncher.java:120)
	at org.springframework.core.task.SyncTaskExecutor.execute(SyncTaskExecutor.java:48)
	at org.springframework.batch.core.launch.support.SimpleJobLauncher.run(SimpleJobLauncher.java:114)
	at org.springframework.data.hadoop.batch.JobsTrigger.startJobs(JobsTrigger.java:49)
	at org.springframework.data.hadoop.batch.JobsTrigger.startJobs(JobsTrigger.java:38)
	at org.springframework.data.hadoop.mapreduce.JarTests.testTasklet(JarTests.java:73)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:616)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
	at org.springframework.test.context.junit4.statements.RunBeforeTestMethodCallbacks.evaluate(RunBeforeTestMethodCallbacks.java:74)
	at org.springframework.test.context.junit4.statements.RunAfterTestMethodCallbacks.evaluate(RunAfterTestMethodCallbacks.java:82)
	at org.springframework.test.context.junit4.statements.SpringRepeat.evaluate(SpringRepeat.java:72)
	at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.runChild(SpringJUnit4ClassRunner.java:231)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
	at org.springframework.test.context.junit4.statements.RunBeforeTestClassCallbacks.evaluate(RunBeforeTestClassCallbacks.java:61)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:31)
	at org.springframework.test.context.junit4.statements.RunAfterTestClassCallbacks.evaluate(RunAfterTestClassCallbacks.java:70)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
	at org.springframework.test.context.junit4.SpringJUnit4ClassRunner.run(SpringJUnit4ClassRunner.java:174)
	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:55)
	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:42)
	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:75)
	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:49)
	at sun.reflect.GeneratedMethodAccessor72.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:616)
	at org.gradle.messaging.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35)
	at org.gradle.messaging.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.messaging.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32)
	at org.gradle.messaging.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93)
	at sun.proxy.$Proxy2.processTestClass(Unknown Source)
	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:103)
	at sun.reflect.GeneratedMethodAccessor71.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:616)
	at org.gradle.messaging.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35)
	at org.gradle.messaging.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
	at org.gradle.messaging.remote.internal.TypeCastDispatch.dispatch(TypeCastDispatch.java:30)
	at org.gradle.messaging.remote.internal.WorkerProtocol.handleIncoming(WorkerProtocol.java:53)
	at org.gradle.messaging.remote.internal.WorkerProtocol.handleIncoming(WorkerProtocol.java:31)
	at org.gradle.messaging.remote.internal.ProtocolStack$ProtocolStage.handleIncoming(ProtocolStack.java:167)
	at org.gradle.messaging.remote.internal.ProtocolStack$BottomStage.handleIncoming(ProtocolStack.java:277)
	at org.gradle.messaging.remote.internal.ProtocolStack$BottomConnection$1.run(ProtocolStack.java:299)
	at org.gradle.messaging.remote.internal.ProtocolStack$ExecuteRunnable.dispatch(ProtocolStack.java:120)
	at org.gradle.messaging.remote.internal.ProtocolStack$ExecuteRunnable.dispatch(ProtocolStack.java:116)
	at org.gradle.messaging.dispatch.AsyncDispatch.dispatchMessages(AsyncDispatch.java:132)
	at org.gradle.messaging.dispatch.AsyncDispatch.access$000(AsyncDispatch.java:33)
	at org.gradle.messaging.dispatch.AsyncDispatch$1.run(AsyncDispatch.java:72)
	at org.gradle.internal.concurrent.DefaultExecutorFactory$StoppableExecutorImpl$1.run(DefaultExecutorFactory.java:66)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1146)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:679)