I recently wrote a MapReduce program and hit this problem when running it from IDEA. None of the solutions I found through Baidu worked, and it took a long time to fix. Below I've collected the approaches I read about and personally tested, in the hope of saving others the same detour. The error output:
19/08/27 10:36:57 ERROR Shell: Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:407)
at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:422)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:415)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
at org.apache.hadoop.security.Groups.parseStaticMapping(Groups.java:168)
at org.apache.hadoop.security.Groups.<init>(Groups.java:132)
at org.apache.hadoop.security.Groups.<init>(Groups.java:100)
at org.apache.hadoop.security.Groups.getUserToGroupsMappingService(Groups.java:435)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:341)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:308)
at org.apache.hadoop.security.UserGroupInformation.loginUserFromSubject(UserGroupInformation.java:895)
at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:861)
at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:728)
at org.apache.hadoop.mapreduce.task.JobContextImpl.<init>(JobContextImpl.java:72)
at org.apache.hadoop.mapreduce.Job.<init>(Job.java:145)
at org.apache.hadoop.mapreduce.Job.getInstance(Job.java:188)
at com.wxx.bigdata.hadoop.mapreduce.wc.WordCountApp.main(WordCountApp.java:24)
19/08/27 10:36:58 INFO deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
19/08/27 10:36:58 INFO JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
Exception in thread "main" java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Ljava/lang/String;I)V
at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode0(Native Method)
at org.apache.hadoop.io.nativeio.NativeIO$Windows.createDirectoryWithMode(NativeIO.java:524)
at org.apache.hadoop.fs.RawLocalFileSystem.mkOneDirWithMode(RawLocalFileSystem.java:465)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirsWithOptionalPermission(RawLocalFileSystem.java:518)
at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:496)
at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:316)
at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:133)
at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:148)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1307)
at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1304)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1924)
at org.apache.hadoop.mapreduce.Job.submit(Job.java:1304)
at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1325)
at com.wxx.bigdata.hadoop.mapreduce.wc.WordCountApp.main(WordCountApp.java:54)
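Both stack traces point at the same root cause: on Windows, Hadoop's Shell class derives the winutils.exe path from the hadoop.home.dir system property, falling back to the HADOOP_HOME environment variable, and the NativeIO call additionally requires a matching hadoop.dll. The "null" in null\bin\winutils.exe is the unresolved Hadoop home. A minimal sketch that mirrors this lookup (my own illustration of the Hadoop 2.x behavior, not code from the project):

// Mirrors how Hadoop 2.x resolves the winutils.exe location on Windows.
// If neither the system property nor the environment variable is set, the
// home is null, yielding exactly the "null\bin\winutils.exe" in the error.
public class WinutilsCheck {
    public static void main(String[] args) {
        String home = System.getProperty("hadoop.home.dir");
        if (home == null) {
            home = System.getenv("HADOOP_HOME");
        }
        System.out.println("Resolved Hadoop home: " + home);
        System.out.println("Expected binary: " + home + "\\bin\\winutils.exe");
    }
}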
The project's pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.wxx.bigdata</groupId>
    <artifactId>wxx-hadoop</artifactId>
    <version>1.0</version>

    <name>wxx-hadoop</name>
    <!-- FIXME change it to the project's website -->
    <url>http://www.example.com</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <hadoop.version>2.6.0-cdh5.15.1</hadoop.version>
    </properties>

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
            <plugins>
                <!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
                <plugin>
                    <artifactId>maven-clean-plugin</artifactId>
                    <version>3.1.0</version>
                </plugin>
                <!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
                <plugin>
                    <artifactId>maven-resources-plugin</artifactId>
                    <version>3.0.2</version>
                </plugin>
                <plugin>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>3.8.0</version>
                </plugin>
                <plugin>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.22.1</version>
                </plugin>
                <plugin>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>3.0.2</version>
                </plugin>
                <plugin>
                    <artifactId>maven-install-plugin</artifactId>
                    <version>2.5.2</version>
                </plugin>
                <plugin>
                    <artifactId>maven-deploy-plugin</artifactId>
                    <version>2.8.2</version>
                </plugin>
                <!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
                <plugin>
                    <artifactId>maven-site-plugin</artifactId>
                    <version>3.7.1</version>
                </plugin>
                <plugin>
                    <artifactId>maven-project-info-reports-plugin</artifactId>
                    <version>3.0.0</version>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>
</project>
Development environment:
Java: JDK 1.8
OS: Windows 7
IDE: IntelliJ IDEA
Hadoop: hadoop-2.6.0-cdh5.15.1
Solution:
1. Download a pre-built Hadoop package (hadoop-2.6.0-cdh5.15.1.tar.gz) and extract it to a local directory (here, D:\app\hadoop\hadoop-2.6.0-cdh5.15.1).
Download link: https://pan.baidu.com/s/1cVyEF1CPduT2lsyDorMMVQ
Extraction code: dy5z
2. Download the Windows native files (mainly hadoop.dll and winutils.exe) from:
https://github.com/steveloughran/winutils
3. Copy hadoop.dll, winutils.exe, and the other files from the hadoop2.7.1 directory of step 2 into the bin directory of the package extracted in step 1 (D:\app\hadoop\hadoop-2.6.0-cdh5.15.1\bin).
4. Set the HADOOP_HOME environment variable to the extraction directory (D:\app\hadoop\hadoop-2.6.0-cdh5.15.1) and append %HADOOP_HOME%\bin to the Path variable. Restart IDEA afterwards so it picks up the new environment; alternatively, set the equivalent system property in code, as shown in the sketch below.
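A minimal sketch of that in-code alternative (my addition, not part of the original steps; the path is the step 1 directory and should be adjusted to your layout). The hadoop.home.dir system property is checked before the HADOOP_HOME environment variable, so it must be set before the first Hadoop class initializes:

public class WordCountApp {
    public static void main(String[] args) throws Exception {
        // Equivalent to setting HADOOP_HOME for the winutils lookup; must run
        // before any Hadoop class (e.g. Job) is loaded by the JVM.
        System.setProperty("hadoop.home.dir", "D:\\app\\hadoop\\hadoop-2.6.0-cdh5.15.1");
        // ... existing job setup and submission unchanged ...
    }
}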
5. Also copy hadoop.dll and winutils.exe into C:\Windows\System32, which is on the default Windows DLL search path.
6. Add a static initializer block that loads hadoop.dll explicitly, i.e. force the JVM to load the hadoop.dll from the bin directory:
static {
    try {
        // Force-load hadoop.dll by absolute path so the lookup does not
        // depend on java.library.path; point this at your own copy.
        System.load("C:\\Users\\whz\\Downloads\\hadoop2.7.6\\bin\\hadoop.dll");
    } catch (UnsatisfiedLinkError e) {
        System.err.println("Native code library failed to load.\n" + e);
        System.exit(1);
    }
}
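System.load takes an absolute path, so it succeeds even when %HADOOP_HOME%\bin is not on java.library.path (System.loadLibrary("hadoop") would be the alternative once it is). The path in the block above is specific to one machine; point it at the hadoop.dll you placed in the bin directory in step 3.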
7. Rerun the WordCount job; the problem is resolved.
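For reference, the job in question follows the standard WordCount pattern. A minimal self-contained version (my reconstruction for illustration; class and variable names are placeholders, not the exact WordCountApp from the stack traces):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountApp {

    // Emits (word, 1) for every whitespace-separated token in the input line.
    public static class WcMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    // Sums the counts for each word.
    public static class WcReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable v : values) {
                sum += v.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        // Input and output paths are taken from the command line.
        Job job = Job.getInstance(new Configuration(), "word count");
        job.setJarByClass(WordCountApp.class);
        job.setMapperClass(WcMapper.class);
        job.setReducerClass(WcReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}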
PS: In step 2 I first tried the hadoop.dll and winutils.exe from the hadoop-2.6.0 directory, and the problem remained; switching to the hadoop-2.7.0 files solved it immediately. A likely explanation is that the CDH 5.x build of Hadoop 2.6.0 carries backported native methods (such as NativeIO$Windows.createDirectoryWithMode0 from the stack trace above) that the hadoop.dll built from stock 2.6.0 does not export, while newer builds do. If you hit the same UnsatisfiedLinkError, try native binaries from a newer Hadoop line.