微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

HDFS API编程

基于Centos虚拟机,搭建HDFS文件系统,完成HDFS API完成编程实验

  1. MakeDir.java:创建目录(FileSystem 类的mkdirs方法)
    在my.hdfs包下,新建类MakeDir,功能是在HDFS的根目录下,创建名为hdfstest的目录。如果创建成功,打印出“Success”,失败则显示“Failed”。
package my.hdfs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class MakeDir {
	/**
	 * Creates the directory /hdfstest under the HDFS root and reports the result.
	 * Prints "Success!" when mkdirs returns true, "Failed!" otherwise.
	 */
	public static void main(String[] args) throws IOException, URISyntaxException {
		Configuration conf = new Configuration();
		String hdfsPath = "hdfs://ZhangYao-1:9000";
		FileSystem hdfs = FileSystem.get(new URI(hdfsPath), conf);
		try {
			String newDir = "/hdfstest";
			boolean result = hdfs.mkdirs(new Path(newDir));
			if (result) {
				System.out.println("Success!");
			} else {
				System.out.println("Failed!");
			}
		} finally {
			// FileSystem is Closeable; the original leaked the client connection.
			hdfs.close();
		}
	}
}

  2. CreateFile.java:创建文件(FileSystem 类的create方法)
    在my.hdfs包下,新建类CreateFile,程序功能是在HDFS的目录/hdfstest下,创建名为testfile的文件
package my.hdfs;  
import java.io.IOException;  
import java.net.URI;  
import java.net.URISyntaxException;  
import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.fs.FSDataOutputStream;  
import org.apache.hadoop.fs.FileSystem;  
import org.apache.hadoop.fs.Path;  
public class CreateFile {
    /**
     * Creates an empty file /hdfstest/testfile on HDFS.
     *
     * try-with-resources guarantees the output stream and the FileSystem
     * are closed (in reverse order) even when create() throws; the original
     * leaked both on any exception path.
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration configuration = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        String filePath = "/hdfstest/testfile";

        try (FileSystem fs = FileSystem.get(new URI(hdfsPath), configuration);
             FSDataOutputStream os = fs.create(new Path(filePath))) {
            // Nothing is written: create() alone materializes an empty file.
        }

        System.out.println("Finish!");
    }
}

  3. DeleteFile.java:删除文件(FileSystem 类的delete方法)
    在my.hdfs包下,新建类DeleteFile,程序功能是先判断在HDFS的目录/hdfstest下是否存在名为testfile的文件,如果存在则删除文件
package my.hdfs;  
import java.io.IOException;  
import java.net.URI;  
import java.net.URISyntaxException;  
import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.fs.FileSystem;  
import org.apache.hadoop.fs.Path;  
public class DeleteFile {

	/**
	 * Deletes /hdfstest/testfile from HDFS if it exists.
	 * Prints "Delete File Successfully!" after deletion, or
	 * "File Not Found!" when the file is absent.
	 */
	public static void main(String[] args) throws IOException, URISyntaxException {
		Configuration conf = new Configuration();
		String hdfsPath = "hdfs://ZhangYao-1:9000";
		FileSystem fs = FileSystem.get(new URI(hdfsPath), conf);
		try {
			Path filePath = new Path("/hdfstest/testfile");
			if (fs.exists(filePath)) {
				// delete(Path) is deprecated; pass recursive=false since
				// the target is a plain file, not a directory tree.
				fs.delete(filePath, false);
				System.out.println("Delete File Successfully!");
			} else {
				System.out.println("File Not Found!");
			}
		} finally {
			// Release the client connection; the original never closed it.
			fs.close();
		}
	}

}

  4. copyFromLocalFile.java:将本地文件复制到HDFS,如果文件已有则覆盖(FileSystem 类的copyFromLocalFile方法)
    在/home/hadoop下使用vi创建sample_data文件,向sample_data文件中写入Hello World。在my.hdfs包下,创建类copyFromLocalFile,程序功能是将本地Linux操作系统上的文件/home/hadoop/sample_data,上传到HDFS文件系统的/hdfstest目录下。
package my.hdfs;  
import java.io.IOException;  
import java.net.URI;  
import java.net.URISyntaxException;  
import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.fs.FileSystem;  
import org.apache.hadoop.fs.Path;  
public class copyFromLocalFile {
    /**
     * Uploads the local file /home/hadoop/sample_data into the HDFS
     * directory /hdfstest/, overwriting any existing copy.
     */
    public static void main(String[] args) throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        String hdfsPath = "hdfs://ZhangYao-1:9000";
        FileSystem hdfs = FileSystem.get(new URI(hdfsPath), conf);
        try {
            String filepath = "/home/hadoop/sample_data";
            String to_HDFS = "/hdfstest/";
            hdfs.copyFromLocalFile(new Path(filepath), new Path(to_HDFS));
        } finally {
            // Release the client connection; the original never closed it.
            hdfs.close();
        }
        System.out.println("Finish!");
    }
}

  5. WriteFile.java:在HDFS上创建文件并写内容文件中(FileSystem 类的create方法及其返回的FSDataOutputStream)
    在my.hdfs包下,新建类WriteFile,程序功能是在HDFS上,创建/hdfstest/writefile文件,并在文件中写入一行内容“hello world hello data!”
package my.hdfs;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteFile {

	/**
	 * Creates /hdfstest/writefile on HDFS and writes one line of text to it.
	 *
	 * The text is encoded explicitly as UTF-8 instead of the platform
	 * default charset, so the file bytes are identical on every client.
	 */
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://ZhangYao-1:9000");
		FileSystem fs = FileSystem.get(conf);
		// UnsupportedEncodingException is an IOException, so the signature holds.
		byte[] buff = "Hello World Hello Data! \n".getBytes("UTF-8");
		String newFileName = "/hdfstest/writefile";
		FSDataOutputStream os = fs.create(new Path(newFileName));
		try {
			os.write(buff);
			System.out.println("Create:" + newFileName);
		} finally {
			// Close stream before FileSystem; the original leaked both on error.
			os.close();
			fs.close();
		}
	}
}

  6. ListFiles.java:显示HDFS文件系统的文件属性(FileSystem 类的listStatus方法)
    在my.hdfs包下,新建类ListFiles,程序功能是列出HDFS文件系统/hdfstest目录下所有的文件,以及文件的权限、用户组、所属用户
package my.hdfs;  
import java.io.IOException;  
import java.net.URI;  
import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.fs.FileStatus;  
import org.apache.hadoop.fs.FileSystem;  
import org.apache.hadoop.fs.Path;  
public class ListFiles {
    /**
     * Lists every entry under /hdfstest and prints its permission,
     * owner, group and full path.
     */
    public static void main(String[] args) throws IOException {
        // The original URI "hdfs://:9000/" had no host; use the same
        // NameNode address as the other listings in this article.
        Configuration conf = new Configuration();
        String hdfspath = "hdfs://ZhangYao-1:9000/";
        FileSystem hdfs = FileSystem.get(URI.create(hdfspath), conf);
        try {
            String watchHDFS = "/hdfstest";
            FileStatus[] files = hdfs.listStatus(new Path(watchHDFS));
            for (FileStatus file : files) {
                // getOwner(): the original "getowner()" does not compile.
                System.out.println(file.getPermission() + " " + file.getOwner()
                        + " " + file.getGroup() + " " + file.getPath());
            }
        } finally {
            hdfs.close();
        }
    }
}

  7. ReadFile.java:读取HDFS文件系统上的文件内容显示在控制台上并写入到本地文件
    新建文件mytext.txt,输入内容如下并将mytext.txt上传到HDFS的根目录下
package my.hdfs;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataInputStream;

public class ReadFile {
	/**
	 * Reads /mytext.txt from HDFS line by line, echoes each line to the
	 * console and appends it (with a trailing newline) to the local file
	 * /home/hadoop/backup.txt.
	 */
	public static void main(String[] args) {
		Configuration conf = new Configuration();
		String hdfsPath = "hdfs://ZhangYao-1:9000";
		// try-with-resources closes every stream even when reading fails;
		// the original only closed them on the success path. UTF-8 is
		// specified explicitly so decoding/encoding is platform-independent.
		try (FileSystem fs = FileSystem.get(new URI(hdfsPath), conf);
		     FSDataInputStream fdi = fs.open(new Path("/mytext.txt"));
		     BufferedReader br = new BufferedReader(new InputStreamReader(fdi, "UTF-8"));
		     OutputStream out = new FileOutputStream(new File("/home/hadoop/backup.txt"))) {
			String str;
			while ((str = br.readLine()) != null) {
				out.write((str + "\n").getBytes("UTF-8"));
				System.out.println(str);
			}
		} catch (Exception e) {
			// printStackTrace(): the original "printstacktrace()" does not compile.
			e.printStackTrace();
		}
	}
}

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 [email protected] 举报,一经查实,本站将立刻删除。

相关推荐