1、HDFS的Shell操作
1.1、基本语法
- hadoop fs 具体命令
- hdfs dfs 具体命令
具体命令 [-appendToFile <localsrc> ... <dst>]
[-cat [-ignoreCrc] <src> ...]
[-chgrp [-R] GROUP PATH...]
[-chmod [-R] <MODE[,MODE]... | OCTALMODE> PATH...]
[-chown [-R] [OWNER][:[GROUP]] PATH...]
[-copyFromLocal [-f] [-p] <localsrc> ... <dst>]
[-copyToLocal [-p] [-ignoreCrc] [-crc] <src> ... <localdst>]
[-count [-q] <path> ...]
[-cp [-f] [-p] <src> ... <dst>]
[-df [-h] [<path> ...]]
[-du [-s] [-h] <path> ...]
[-get [-p] [-ignoreCrc] [-crc] <src> ... <localdst>]
[-getmerge [-nl] <src> <localdst>]
[-help [cmd ...]]
[-ls [-d] [-h] [-R] [<path> ...]]
[-mkdir [-p] <path> ...]
[-moveFromLocal <localsrc> ... <dst>]
[-moveToLocal <src> <localdst>]
[-mv <src> ... <dst>]
[-put [-f] [-p] <localsrc> ... <dst>]
[-rm [-f] [-r|-R] [-skipTrash] <src> ...]
[-setrep [-R] [-w] <rep> <path/file> ...]
[-stat [format] <path> ...]
[-tail [-f] <file>]
[-test -[defsz] <path>]
[-text [-ignoreCrc] <src> ...]
1.2、上传
-moveFromLocal:从本地剪切粘贴到 HDFS
-copyFromLocal:从本地文件系统中拷贝文件到 HDFS 路径去
-put:等同于 copyFromLocal,生产环境更习惯用 put
-appendToFile:追加一个文件到已经存在的文件末尾
1.3、下载
-copyToLocal:从 HDFS 拷贝到本地
-get:等同于 copyToLocal,生产环境更习惯用 get
1.4、HDFS 直接操作
-ls: 显示目录信息
-cat:显示文件内容
-chgrp、-chmod、-chown:与 Linux 文件系统中的用法一样,修改文件所属权限
-mkdir:创建路径
-cp:从 HDFS 的一个路径拷贝到 HDFS 的另一个路径
-mv:在 HDFS 目录中移动文件
-tail:显示一个文件的末尾 1kb 的数据
-rm:删除文件或文件夹
-rm -r:递归删除目录及目录里面内容
-du 统计文件夹的大小信息
-setrep:设置 HDFS 中文件的副本数量
2、HDFS的API操作
2.1、获取文件系统
// Build a Hadoop configuration and set the HDFS file replication factor to 1.
Configuration conf = new Configuration();
conf.set("dfs.replication", "1");
// NameNode RPC address — adjust host/port to match your cluster.
URI uri = new URI("hdfs://master:9000");
// Obtain a FileSystem client handle bound to that HDFS cluster.
FileSystem fs = FileSystem.get(uri, conf);
2.2、Java API 操作 HDFS
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.nio.charset.StandardCharsets;
/**
 * Examples of operating HDFS through the Java {@code FileSystem} API:
 * creating/deleting paths, listing status, streaming reads/writes, and
 * upload/download between the local file system and the cluster.
 *
 * <p>Each method is an independent JUnit test; {@link #init()} connects to
 * the cluster before every test. Requires a reachable NameNode at
 * {@code hdfs://master:9000}.
 */
public class HadoopAPI {

    // HDFS client handle, initialized before each test in init().
    FileSystem fs;

    /** Connect to HDFS before each test, with a replication factor of 1. */
    @Before
    public void init() throws Exception {
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "1");
        // NameNode address — adjust to your cluster.
        URI uri = new URI("hdfs://master:9000");
        fs = FileSystem.get(uri, conf);
    }

    /** Create the directory {@code /mk} (parent directories created as needed). */
    @Test
    public void mkdir() throws Exception {
        fs.mkdirs(new Path("/mk"));
    }

    /** Recursively delete {@code /data} ({@code true} = delete non-empty dirs). */
    @Test
    public void delete() throws Exception {
        fs.delete(new Path("/data"), true);
    }

    /** List the children of the root directory and print basic metadata. */
    @Test
    public void listStatus() throws Exception {
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            System.out.println(fileStatus.getLen());
            System.out.println(fileStatus.getBlockSize());
            System.out.println(fileStatus.getPath());
            System.out.println(fileStatus.getReplication());
        }
    }

    /** Print the full {@code FileStatus} of a single file. */
    @Test
    public void getFileStatus() throws Exception {
        FileStatus fileStatus = fs.getFileStatus(new Path("/student.txt"));
        System.out.println(fileStatus);
    }

    /**
     * Read {@code /student.txt} line by line and print it.
     *
     * <p>FIX: try-with-resources guarantees both streams are closed even when
     * reading throws (the original leaked them on exception), and the UTF-8
     * charset is explicit instead of relying on the platform default.
     */
    @Test
    public void load() throws Exception {
        try (FSDataInputStream open = fs.open(new Path("/student.txt"));
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(open, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Create {@code /test.txt} and write two lines of Chinese text.
     *
     * <p>FIX: the original encoded with the platform default charset, which
     * corrupts the non-ASCII content on e.g. a GBK-configured JVM — UTF-8 is
     * now explicit. try-with-resources replaces the manual close calls
     * (closing the writer already closes the underlying stream, so the
     * original's second close was redundant) and also closes on exception.
     */
    @Test
    public void create() throws Exception {
        try (FSDataOutputStream out = fs.create(new Path("/test.txt"));
             BufferedWriter bw = new BufferedWriter(
                     new OutputStreamWriter(out, StandardCharsets.UTF_8))) {
            bw.write("你好!");
            bw.newLine();
            bw.write("世界!");
            bw.newLine();
        }
    }

    /** Upload a local file to the HDFS root (the local source is kept). */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path hdfs = new Path("/");
        Path local = new Path("E:\\ideaFile\\shujia\\bd13\\data\\students.txt");
        fs.copyFromLocalFile(local, hdfs);
    }

    /**
     * Download {@code /students.txt} into a local directory.
     * Args: delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true
     * skips writing a local {@code .crc} checksum file.
     */
    @Test
    public void copyToLocalFile() throws Exception {
        Path path = new Path("/students.txt");
        Path local = new Path("E:\\ideaFile\\shujia\\bd13\\data");
        fs.copyToLocalFile(false, path, local, true);
    }
}
|