经过种种调试,Hadoop可以用了。
package hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
// Fixed: the Hadoop helper class is IOUtils (capital "IO"), not IoUtils —
// the original import could not compile.
import org.apache.hadoop.io.IOUtils;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

/**
 * Uploads the local file D://text.txt to HDFS as /text.txt,
 * connecting to the NameNode at localhost:8020 as user "root".
 */
public class upload {
    public static void main(String[] args) throws Exception {
        // Fixed URI: the port must directly follow the host. In the original
        // "hdfs://localhost/:8020", ":8020" was parsed as part of the path and
        // the URI carried no port at all.
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost:8020"), new Configuration(), "root");
        try {
            InputStream in = new FileInputStream("D://text.txt");
            OutputStream out = fs.create(new Path("/text.txt"));
            // close=true: IOUtils closes both streams when the copy finishes.
            IOUtils.copyBytes(in, out, 4096, true);
            System.out.println("上传Hadoop文件成功!");
        } finally {
            fs.close(); // release the client connection even on failure
        }
    }
}
package hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
// Fixed: the Hadoop helper class is IOUtils (capital "IO"), not IoUtils —
// the original import could not compile.
import org.apache.hadoop.io.IOUtils;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

/**
 * Downloads /text.txt from HDFS (NameNode at localhost:8020)
 * to the local path D://JAVA/text.txt.
 */
public class download {
    public static void main(String[] args) throws Exception {
        // Fixed URI: the port must directly follow the host. In the original
        // "hdfs://localhost/:8020", ":8020" was parsed as part of the path and
        // the URI carried no port at all.
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost:8020"), new Configuration());
        try {
            InputStream is = fs.open(new Path("/text.txt"));
            OutputStream out = new FileOutputStream("D://JAVA/text.txt");
            // close=true: IOUtils closes both streams when the copy finishes.
            IOUtils.copyBytes(is, out, 4096, true);
            System.out.println("下载完成");
        } finally {
            fs.close(); // release the client connection even on failure
        }
    }
}
版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 [email protected] 举报,一经查实,本站将立刻删除。