Operating Hadoop from Java Code
Java runs on Windows, while Hadoop runs on Linux.
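When the client runs on Windows against a Linux cluster, HDFS permission checks see your local Windows username, which usually does not match any HDFS user; the client may also warn about a missing winutils.exe if HADOOP_HOME is not set. A common workaround, shown as a minimal sketch (the user name "root" below is an assumption; substitute your actual HDFS user), is to set the HADOOP_USER_NAME property before the FileSystem is created:

// Run this before FileSystem.get(); "root" is an assumed HDFS user name -- replace with yours.
System.setProperty("HADOOP_USER_NAME", "root");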
First, add the following dependencies to the project's pom.xml file:
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.7.6</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.7.6</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.7.6</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.3</version>
    </dependency>
</dependencies>
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.URI;

public class HadoopAPI {
    FileSystem fs;

    @Before
    public void init() throws Exception {
        // Hadoop configuration
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "1");
        URI uri = new URI("hdfs://master:9000");
        fs = FileSystem.get(uri, conf);
    }

    // Create a directory
    @Test
    public void mkdir() throws Exception {
        fs.mkdirs(new Path("/mk"));
    }

    // Delete a file or directory
    // true: delete recursively
    @Test
    public void delete() throws Exception {
        fs.delete(new Path("/student.txt"), false);
    }

    // Get files or directories
    // listStatus returns multiple entries (one directory level); getFileStatus returns a single entry
    @Test
    public void list() throws Exception {
        FileStatus[] statuses = this.fs.listStatus(new Path("/"));
        for (FileStatus f : statuses) {
            System.out.println(f);
            System.out.println(f.getLen());
            System.out.println(f.getBlockSize());
            System.out.println(f.getPath());
            System.out.println(f.getReplication());
        }
    }

    // Read a file
    @Test
    public void open() throws Exception {
        FSDataInputStream open = fs.open(new Path("/student.txt"));
        BufferedReader br = new BufferedReader(new InputStreamReader(open));
        String line;
        while ((line = br.readLine()) != null) {
            System.out.println(line);
        }
        br.close();
        open.close();
    }

    // Write a file
    @Test
    public void create() throws Exception {
        FSDataOutputStream create = fs.create(new Path("/test.txt"));
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(create));
        bw.write("Hello!");
        bw.newLine();
        bw.write("World!");
        bw.newLine();
        bw.close();
        create.close();
    }

    // Upload a local file to HDFS
    @Test
    public void copyFromLocal() throws Exception {
        Path hdfs = new Path("/");
        Path local = new Path("D:\\BigData\\ideaProjects\\bd13\\data\\student.txt");
        fs.copyFromLocalFile(local, hdfs);
    }

    // Download a file from HDFS
    // false: keep the source file; true: use the raw local file system (skip .crc files)
    @Test
    public void copyToLocal() throws Exception {
        Path hdfs = new Path("/test.txt");
        Path local = new Path("D:\\BigData\\ideaProjects\\bd13\\data");
        fs.copyToLocalFile(false, hdfs, local, true);
    }
}
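The FileSystem handle opened in init() is never closed above. A small hedged addition, assuming the same JUnit 4 setup (it needs import org.junit.After), is a teardown method that releases the connection after each test:

@After
public void close() throws Exception {
    // Release the connection to the NameNode after each test method.
    fs.close();
}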