使用HDP在Hortonworks上运行jar会引发ClassNotFoundException

user_s

我正在尝试在带有HDP的hortonworks沙箱上从Coursera运行一个简单的map reduce程序。这是程序(取自https://github.com/jz33/Coursera-Cloud-Computing-Applications-Solution-Manual/blob/master/hw2/TitleCount.java):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import java.io.*;
import java.util.*;
/**
* Classic "Word Count"
 */
/**
 * Classic "Word Count" over article titles: tokenizes each input line with a
 * delimiter set read from HDFS, drops stop words, lowercases tokens, and
 * counts occurrences of each remaining token.
 *
 * Usage:
 *   hadoop jar TitleCount.jar TitleCount \
 *       -D stopwords=/path/stopwords.txt -D delimiters=/path/delimiters.txt \
 *       &lt;input dir&gt; &lt;output dir&gt;
 */
public class TitleCount extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        // ToolRunner parses generic options (the -D key=value pairs) into the
        // Configuration before run() is invoked.
        int res = ToolRunner.run(new Configuration(), new TitleCount(), args);
        System.exit(res);
    }

    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(this.getConf(), "Title Count");

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setMapperClass(TitleCountMap.class);
        job.setReducerClass(TitleCountReduce.class);

        // args[0] = input path, args[1] = output path (after generic options
        // have been stripped by ToolRunner).
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // Ships the jar containing this class to the cluster so task JVMs can
        // resolve TitleCount$TitleCountMap / TitleCount$TitleCountReduce.
        job.setJarByClass(TitleCount.class);

        return job.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Reads an entire HDFS file into a single string, one '\n' appended after
     * every line.
     *
     * @param path HDFS path of the file to read
     * @param conf configuration used to resolve the {@link FileSystem}
     * @return the file contents, newline-terminated per line
     * @throws IOException if the file cannot be opened or read
     */
    public static String readHDFSFile(String path, Configuration conf) throws IOException {
        Path pt = new Path(path);
        FileSystem fs = FileSystem.get(pt.toUri(), conf);
        // try-with-resources: the original leaked the stream and reader on
        // every call (they were never closed).
        try (FSDataInputStream file = fs.open(pt);
             BufferedReader buffIn = new BufferedReader(new InputStreamReader(file))) {
            StringBuilder everything = new StringBuilder();
            String line;
            while ((line = buffIn.readLine()) != null) {
                everything.append(line);
                everything.append("\n");
            }
            return everything.toString();
        }
    }

    /**
     * Mapper: splits each input line on the configured delimiters, lowercases
     * each token, and emits (token, 1) for every token not in the stop-word
     * set.
     */
    public static class TitleCountMap extends Mapper<Object, Text, Text, IntWritable> {
        // Constant "1" value — reused instead of allocating per token.
        private static final IntWritable ONE = new IntWritable(1);

        Set<String> stopWords = new HashSet<String>();
        String delimiters;
        // Reusable output key; context.write serializes immediately, so
        // reusing a single Text instance is safe and avoids per-token garbage.
        private final Text word = new Text();

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            // Both side files are supplied via the -D delimiters=... and
            // -D stopwords=... generic options and read once per task.
            String delimitersPath = conf.get("delimiters");
            delimiters = readHDFSFile(delimitersPath, conf);

            String stopWordsPath = conf.get("stopwords");
            stopWords.addAll(Arrays.asList(readHDFSFile(stopWordsPath, conf).split("\n")));
        }

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            StringTokenizer stk = new StringTokenizer(value.toString(), delimiters);
            while (stk.hasMoreTokens()) {
                String token = stk.nextToken().trim().toLowerCase();
                if (!stopWords.contains(token)) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    /**
     * Reducer: sums the 1-counts emitted for each token and writes
     * (token, total).
     */
    public static class TitleCountReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        // Reusable output value — same safe-reuse pattern as the mapper.
        private final IntWritable total = new IntWritable();

        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable count : values) {
                sum += count.get();
            }
            total.set(sum);
            context.write(key, total);
        }
    }
}

这是我的运行方式:

  • git clone https://github.com/xldrx/cloudapp-mp2.git

  • cd cloudapp-mp2

  • sudo -u hdfs bash start.sh

  • cp /media/sharedFolder/TitleCount.java .

  • mkdir build

  • export HADOOP_CLASSPATH=$JAVA_HOME/lib/tools.jar

  • hadoop com.sun.tools.javac.Main TitleCount.java -d build

  • jar -cvf TitleCount.jar -C build/ ./

  • sudo -u hdfs hadoop fs -rm -r /mp2/A-output

    之后,构建文件夹中有了 .class 文件(TitleCount.class、TitleCount$TitleCountMap.class、TitleCount$TitleCountReduce.class),当前文件夹 cloudapp-mp2 中有了 TitleCount.jar 文件。

  • sudo -u hdfs hadoop jar TitleCount.jar TitleCount -D stopwords=/mp2/misc/stopwords.txt -D delimiters=/mp2/misc/delimiters.txt /mp2/titles /mp2/A-output

这是我得到的错误:

错误:java.lang.RuntimeException: java.lang.ClassNotFoundException: 类未找到 TitleCount$TitleCountReduce

完整跟踪:

000_0, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class Title
Count$TitleCountReduce not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2195
)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getReducerClass(JobCo
ntextImpl.java:210)
        at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:611
)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInforma
tion.java:1657)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class TitleCount$TitleCountReduce n
ot found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.jav
a:2101)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2193
)
        ... 8 more

15/12/30 12:46:45 INFO mapreduce.Job: Task Id : attempt_1451479269826_0001_r_000
000_1, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class Title
Count$TitleCountReduce not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2195
)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getReducerClass(JobCo
ntextImpl.java:210)
        at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:611
)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInforma
tion.java:1657)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class TitleCount$TitleCountReduce n
ot found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.jav
a:2101)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2193
)
        ... 8 more

15/12/30 12:46:57 INFO mapreduce.Job: Task Id : attempt_1451479269826_0001_r_000
000_2, Status : FAILED
Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class Title
Count$TitleCountReduce not found
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2195
)
        at org.apache.hadoop.mapreduce.task.JobContextImpl.getReducerClass(JobCo
ntextImpl.java:210)
        at org.apache.hadoop.mapred.ReduceTask.runNewReducer(ReduceTask.java:611
)
        at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:389)
        at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:164)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInforma
tion.java:1657)
        at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
Caused by: java.lang.ClassNotFoundException: Class TitleCount$TitleCountReduce n
ot found
        at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.jav
a:2101)
        at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2193
)
        ... 8 more

15/12/30 12:47:05 INFO mapreduce.Job:  map 0% reduce 100%
15/12/30 12:47:06 INFO mapreduce.Job: Job job_1451479269826_0001 failed with sta
te FAILED due to: Task failed task_1451479269826_0001_r_000000
Job failed as tasks failed. failedMaps:0 failedReduces:1

15/12/30 12:47:07 INFO mapreduce.Job: Counters: 7
        Job Counters
                Failed reduce tasks=4
                Launched reduce tasks=4
                Total time spent by all maps in occupied slots (ms)=0
                Total time spent by all reduces in occupied slots (ms)=32247
                Total time spent by all reduce tasks (ms)=32247
                Total vcore-seconds taken by all reduce tasks=32247
                Total megabyte-seconds taken by all reduce tasks=8061750
user_s

问题是我使用sudo -u hdfs而不是root用户。

我必须添加 /user/root 文件夹:

sudo -u hdfs hadoop fs -mkdir /user/root 

并授予 /user/root 和 /mp2 的权限:

sudo -u hdfs hadoop fs -chown root:root /user/root
sudo -u hdfs hadoop fs -chown root:root /mp2
sudo -u hdfs hadoop fs -chown root:root /mp2/*

本文收集自互联网,转载请注明来源。

如有侵权,请联系[email protected] 删除。

编辑于
0

我来说两句

0条评论
登录后参与评论

相关文章

来自分类Dev

在TestNG中运行时,即使对于内置类型,使用反射创建类也会引发java.lang.ClassNotFoundException

来自分类Dev

在 Hortonworks 沙箱上运行 php

来自分类Dev

如何安装Hortonworks的HDP?

来自分类Dev

.Jar在Windows上运行正常,但在MAC上引发异常

来自分类Dev

在TeamCity上运行OpenTk依赖的exe会引发AccessViolationException

来自分类Dev

在 Oracle Virtual Box 上运行 Hortonworks Sandbox

来自分类Dev

HortonWorks Sandbox HDP 卡住了

来自分类Dev

使用持久卷运行 mongo 会引发错误 - Kubernetes

来自分类Dev

在向量上使用assert_that会引发错误

来自分类Dev

Kotlin集合在Android上引发ClassNotFoundException

来自分类Dev

在EC2实例上使用PHP将文件上传到s3会引发凭据错误,但可在本地运行

来自分类Dev

如何在Hortonworks HDP中安装Ambari?

来自分类Dev

在Windows上运行“ docker-compose up”会引发“没有名为fnctl的模块”错误

来自分类Dev

在Nano Server上运行ASP.NET 5会引发“无法加载DLL'kernel32'”

来自分类Dev

Python:在任务执行时在Synology nas上运行我的程序会引发UnicodeEncodeError

来自分类Dev

在IPU模型上运行Tensorflow程序会引发“非法指令(核心转储)”错误

来自分类Dev

URI类的Java工具:添加依赖项会引发ClassNotFoundException

来自分类Dev

运行 Flask 会引发“WinError 10038”

来自分类Dev

无法在HDP 2.0上运行Spark 1.0 SparkPi

来自分类Dev

如何在 YARN 和 HDP 上运行 Spark 2.2?

来自分类Dev

使用--jars选项时,Spark引发ClassNotFoundException

来自分类Dev

Pig Service Check使用失败-用户:rm/sandbox.hortonworks.com@HDP-SANDBOX不能模拟ambari-qa

来自分类Dev

在类型定义中使用泛型时,Dart会引发运行时异常

来自分类Dev

在PythonAnywhere上使用Flask-Stormpath使用Facebook登录会引发JSONDecodeError

来自分类Dev

在Android上读取文件会引发NullPointerException

来自分类Dev

运行'activator ui'会在Play 2.3.2中引发ClassNotFoundException

来自分类Dev

在向后排序的数组上使用存储桶排序时,程序会引发ArrayIndexOutOfBoundsException

来自分类Dev

使用gzip恢复HDD图像会引发错误,设备上没有剩余空间

来自分类Dev

使用 std::copy 时,向量上的 ostream 重载会引发错误

Related 相关文章

  1. 1

    在TestNG中运行时,即使对于内置类型,使用反射创建类也会引发java.lang.ClassNotFoundException

  2. 2

    在 Hortonworks 沙箱上运行 php

  3. 3

    如何安装Hortonworks的HDP?

  4. 4

    .Jar在Windows上运行正常,但在MAC上引发异常

  5. 5

    在TeamCity上运行OpenTk依赖的exe会引发AccessViolationException

  6. 6

    在 Oracle Virtual Box 上运行 Hortonworks Sandbox

  7. 7

    HortonWorks Sandbox HDP 卡住了

  8. 8

    使用持久卷运行 mongo 会引发错误 - Kubernetes

  9. 9

    在向量上使用assert_that会引发错误

  10. 10

    Kotlin集合在Android上引发ClassNotFoundException

  11. 11

    在EC2实例上使用PHP将文件上传到s3会引发凭据错误,但可在本地运行

  12. 12

    如何在Hortonworks HDP中安装Ambari?

  13. 13

    在Windows上运行“ docker-compose up”会引发“没有名为fnctl的模块”错误

  14. 14

    在Nano Server上运行ASP.NET 5会引发“无法加载DLL'kernel32'”

  15. 15

    Python:在任务执行时在Synology nas上运行我的程序会引发UnicodeEncodeError

  16. 16

    在IPU模型上运行Tensorflow程序会引发“非法指令(核心转储)”错误

  17. 17

    URI类的Java工具:添加依赖项会引发ClassNotFoundException

  18. 18

    运行 Flask 会引发“WinError 10038”

  19. 19

    无法在HDP 2.0上运行Spark 1.0 SparkPi

  20. 20

    如何在 YARN 和 HDP 上运行 Spark 2.2?

  21. 21

    使用--jars选项时,Spark引发ClassNotFoundException

  22. 22

    Pig Service Check使用失败-用户:rm/sandbox.hortonworks.com@HDP-SANDBOX不能模拟ambari-qa

  23. 23

    在类型定义中使用泛型时,Dart会引发运行时异常

  24. 24

    在PythonAnywhere上使用Flask-Stormpath使用Facebook登录会引发JSONDecodeError

  25. 25

    在Android上读取文件会引发NullPointerException

  26. 26

    运行'activator ui'会在Play 2.3.2中引发ClassNotFoundException

  27. 27

    在向后排序的数组上使用存储桶排序时,程序会引发ArrayIndexOutOfBoundsException

  28. 28

    使用gzip恢复HDD图像会引发错误,设备上没有剩余空间

  29. 29

    使用 std::copy 时,向量上的 ostream 重载会引发错误

热门标签

归档