Eclipse HDFS Cluster API
Operating an HDFS cluster from Eclipse
Configuring the environment on Windows
1. Configure HADOOP_HOME

Download hadoop-common-2.2.0-bin-master and point HADOOP_HOME at the extracted directory.
2. Configure HADOOP_USER_NAME
This prevents permission problems when accessing HDFS.
3. Modify Path
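For example, the three settings might look like this (the install path and user name below are assumptions; use your own extraction directory and a user that owns the HDFS directories):

HADOOP_HOME=D:\hadoop\hadoop-common-2.2.0-bin-master
HADOOP_USER_NAME=root
Path=%Path%;%HADOOP_HOME%\bin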
Configuring Eclipse
1. Add the plugin
Copy the jar into: eclipse folder-->dropins folder-->plugins-->hadoop-eclipse-plugin-2.6.0.jar
Download hadoop-eclipse-plugin-2.6.0.jar (Baidu Cloud link)
Start Eclipse
1. Open the Map/Reduce view
2. Create a connection
1. Right-click in the blank area and choose New Hadoop location
2. Fill in the connection information (see the example after this list)
3. Browse the HDFS files
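For reference, the location settings might look like this (field labels vary slightly by plugin version, and the host and ports below are assumptions; the DFS Master port must match the port of fs.defaultFS in your cluster's core-site.xml, commonly 8020 or 9000):

Location name: any name, e.g. myhadoop
Map/Reduce(V2) Master: Host node01, Port 9001
DFS Master: Host node01, Port 8020
User name: root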
3. Create a Java project
1. Import the jar packages
Import the jars from the common, hdfs, and tools directories under share/hadoop in the Hadoop directory, together with the jars in their lib subdirectories.
Remember to add them to the build path.
2. Import the XML configuration files
Copy hdfs-site.xml and core-site.xml from etc/hadoop in the Hadoop directory into the project.
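If the cluster files are not at hand, a minimal core-site.xml only needs fs.defaultFS so the client can find the NameNode (the host and port here are assumptions):

<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://node01:8020</value>
    </property>
</configuration>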
3. Create a log4j.properties file
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
4. Create the Java class
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.HdfsBlockLocation;
import org.apache.hadoop.fs.Path;
import java.util.Arrays;
import java.util.List;
public class HDFSTest {
    public static void main(String[] args) throws IOException {
        // A Configuration object must be created before operating on HDFS;
        // true loads the *-site.xml files found on the classpath
        Configuration conf = new Configuration(true);
        // Create the FileSystem object used to operate on HDFS
        FileSystem fs = FileSystem.get(conf);
        // List the contents of the file system
        List<FileStatus> list = listFileSystem(fs, "/");
        // Create a directory
        createDir(fs, "/test/abc");
        // Upload a file
        uploadFileToHDFS(fs, "D:\\hadoop\\VMware.txt", "/test/abc/");
        // Download a file
        downLoadFileFromHDFS(fs, "/test/abc/VMware.txt", "d:/");
        // Delete ...
        // Rename
        // renameFile(fs, "/test/abc/VMware.txt", "/test/abc/Angelababy");
        // Move or copy within HDFS
        // innerCopyAndMoveFile(fs, conf, "/test/abc/Angelababy", "/");
        // Create a new empty file
        // createNewFile(fs, "/test/abc/hanhong");
        // Write to a file
        // writeToHDFSFile(fs, "/test/abc/hanhong", "hello world");
        // Append to a file
        // appendToHDFSFile(fs, "/test/abc/hanhong", "\nhello world");
        // Read a file's contents
        // readFromHDFSFile(fs, "/test/abc/hanhong");
        // Get the locations of a file's blocks
        // getFileLocation(fs, "/Angelababy");
    }
    private static void getFileLocation(FileSystem fs, String string) throws IOException {
        FileStatus fileStatus = fs.getFileStatus(new Path(string));
        long len = fileStatus.getLen();
        // Ask the NameNode for the block locations covering the whole file
        BlockLocation[] fileBlockLocations = fs.getFileBlockLocations(fileStatus, 0, len);
        String[] hosts = fileBlockLocations[0].getHosts();
        for (String host : hosts) {
            System.out.println(host);
        }
        // The HDFS-specific subtype also exposes the block id
        HdfsBlockLocation blockLocation = (HdfsBlockLocation) fileBlockLocations[0];
        long blockId = blockLocation.getLocatedBlock().getBlock().getBlockId();
        System.out.println(blockId);
    }
    private static void readFromHDFSFile(FileSystem fs, String string) throws IllegalArgumentException, IOException {
        FSDataInputStream inputStream = fs.open(new Path(string));
        FileStatus fileStatus = fs.getFileStatus(new Path(string));
        long len = fileStatus.getLen();
        byte[] b = new byte[(int) len];
        int read = inputStream.read(b);
        while (read != -1) {
            // Only convert the bytes actually returned by this read
            System.out.println(new String(b, 0, read));
            read = inputStream.read(b);
        }
        inputStream.close();
    }
    private static void appendToHDFSFile(FileSystem fs, String filePath, String content) throws IllegalArgumentException, IOException {
        // Appending requires the cluster to permit appends (Hadoop 2.x does by default)
        FSDataOutputStream append = fs.append(new Path(filePath));
        append.write(content.getBytes("UTF-8"));
        append.flush();
        append.close();
    }
    private static void writeToHDFSFile(FileSystem fs, String filePath, String content) throws IllegalArgumentException, IOException {
        // create() overwrites the file if it already exists
        FSDataOutputStream outputStream = fs.create(new Path(filePath));
        outputStream.write(content.getBytes("UTF-8"));
        outputStream.flush();
        outputStream.close();
    }
    private static void createNewFile(FileSystem fs, String string) throws IllegalArgumentException, IOException {
        fs.createNewFile(new Path(string));
    }
    private static void innerCopyAndMoveFile(FileSystem fs, Configuration conf, String src, String dest) throws IOException {
        Path srcPath = new Path(src);
        Path destPath = new Path(dest);
        // Copy within HDFS (deleteSource = false)
        // FileUtil.copy(srcPath.getFileSystem(conf), srcPath, destPath.getFileSystem(conf), destPath, false, conf);
        // Move within HDFS (deleteSource = true)
        FileUtil.copy(srcPath.getFileSystem(conf), srcPath, destPath.getFileSystem(conf), destPath, true, conf);
    }
    private static void renameFile(FileSystem fs, String src, String dest) throws IOException {
        Path srcPath = new Path(src);
        Path destPath = new Path(dest);
        fs.rename(srcPath, destPath);
    }
    private static void downLoadFileFromHDFS(FileSystem fs, String src, String dest) throws IOException {
        Path srcPath = new Path(src);
        Path destPath = new Path(dest);
        // copyToLocal
        fs.copyToLocalFile(srcPath, destPath);
        // moveToLocal: passing true deletes the file from HDFS after downloading
        // fs.copyToLocalFile(true, srcPath, destPath);
    }
    private static void uploadFileToHDFS(FileSystem fs, String src, String dest) throws IOException {
        Path srcPath = new Path(src);
        Path destPath = new Path(dest);
        // copyFromLocal
        fs.copyFromLocalFile(srcPath, destPath);
        // moveFromLocal: passing true deletes the local file after uploading
        // fs.copyFromLocalFile(true, srcPath, destPath);
    }
    private static void createDir(FileSystem fs, String string) throws IllegalArgumentException, IOException {
        Path path = new Path(string);
        // Recreate the directory from scratch if it already exists
        if (fs.exists(path)) {
            fs.delete(path, true);
        }
        fs.mkdirs(path);
    }
    private static List<FileStatus> listFileSystem(FileSystem fs, String path) throws FileNotFoundException, IOException {
        Path ppath = new Path(path);
        // List the direct children of the given path
        FileStatus[] listStatus = fs.listStatus(ppath);
        for (FileStatus fileStatus : listStatus) {
            System.out.println(fileStatus);
        }
        return Arrays.asList(listStatus);
    }
}
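The listing helper above only shows one directory level. As a minimal sketch of going deeper (using the standard FileSystem#listFiles API from the same org.apache.hadoop.fs package; the method name here is my own), a recursive listing could be added to HDFSTest like this:

import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;

    // Walk the tree below 'path' and print every file (directories are not returned)
    private static void listRecursively(FileSystem fs, String path) throws IOException {
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(path), true);
        while (it.hasNext()) {
            System.out.println(it.next().getPath());
        }
    }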
This project only implements some of the basic operations, without any extra logic; extend it yourself as needed. One problem you may run into:
WARN [org.apache.hadoop.util.NativeCodeLoader] - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
This warning can be ignored; it does not affect anything.