1. Configure the core-site.xml configuration file.
Property: hadoop.tmp.dir sets the directory where the NameNode keeps its metadata; on a DataNode it is the directory where that node stores file data.
Property: fs.default.name sets the NameNode's IP address and port (the default is file:///). Java API clients must use the URL configured here to connect to HDFS, and DataNodes use the same URL to reach the NameNode.
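As a concrete illustration, a minimal core-site.xml sketch for a pseudo-distributed setup might look like the following; the host, port, and temp path are placeholder values matching the examples below, not prescribed ones:

<configuration>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/tmp/hadoop-${user.name}</value>
  </property>
</configuration>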
2. Use the Java API to access and modify files on HDFS.
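The snippets below target the Hadoop 0.20-era API and assume the following imports, listed here once so each method can stand on its own:

import java.io.*;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;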
* /** Upload a local file to HDFS */
private static void uploadToHdfs() throws FileNotFoundException, IOException {
    String localSrc = "d:/123.txt";                  // local source file
    String dst = "hdfs://localhost:9000/123.txt";    // destination file on HDFS
    InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(dst), conf);
    // create() takes a Progressable callback; a dot is printed as each packet is written
    OutputStream out = fs.create(new Path(dst), new Progressable() {
        public void progress() {
            System.out.print(".");
        }
    });
    IOUtils.copyBytes(in, out, 4096, true);          // 4 KB buffer; true closes both streams
}
String dst = "hdfs://localhost:9000/";* /**从HDFS上读取文件*/
private static void readFromHdfs() throws FileNotFoundException,IOException {String dst = "hdfs://localhost:9000/";
Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(URI.create(dst), conf); FSDataInputStream hdfsInStream = fs.open(new Path(dst)); OutputStream out = new FileOutputStream("d:/qq-hdfs.txt"); byte[] ioBuffer = new byte[1024]; int readLen = hdfsInStream.read(ioBuffer); while(-1 != readLen){ out.write(ioBuffer, 0, readLen); readLen = hdfsInStream.read(ioBuffer); } out.close(); hdfsInStream.close(); fs.close(); }
* /** Append content to the end of an existing file on HDFS.
     Note: appending must be enabled by adding the following to hdfs-site.xml:
     <property><name>dfs.append.support</name><value>true</value></property> */
private static void appendToHdfs() throws FileNotFoundException, IOException {
    String dst = "hdfs://localhost:9000/123.txt";    // file to append to
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(dst), conf);
    FSDataOutputStream out = fs.append(new Path(dst));
    byte[] bytes = "zhangzk add by hdfs java api".getBytes();
    out.write(bytes, 0, bytes.length);               // write the payload once
    out.close();
    fs.close();
}
* /** Delete a file from HDFS */
private static void deleteFromHdfs() throws FileNotFoundException, IOException {
    String dst = "hdfs://localhost:9000/123.txt";    // file to delete
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(dst), conf);
    fs.deleteOnExit(new Path(dst));                  // the delete is carried out when the FileSystem closes
    fs.close();
}
* /** List the files and directories under an HDFS path */
private static void getDirectoryFromHdfs() throws FileNotFoundException, IOException {
String dst = "hdfs://localhost:9000/";
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(URI.create(dst), conf);
FileStatus fileList[] = fs.listStatus(new Path(dst));
int size = fileList.length;
for(int i = 0; i < size; i++){
System.out.println("name:" + fileList[i].getPath().getName() + "/t/tsize:" + fileList[i].getLen());
}
fs.close();
}
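For completeness, the helpers above could be exercised from a simple driver like the sketch below; it assumes the methods live in one class, that the NameNode from the examples is running at hdfs://localhost:9000, and that d:/123.txt exists locally:

public static void main(String[] args) throws IOException {
    uploadToHdfs();           // copy the local file up
    appendToHdfs();           // append a line to it (requires dfs.append.support)
    readFromHdfs();           // copy it back down to d:/qq-hdfs.txt
    getDirectoryFromHdfs();   // print name and size of each entry under /
    deleteFromHdfs();         // schedule the file for deletion
}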
* Finally, you can configure the client directly in code by loading the cluster's configuration files:
Configuration conf = new Configuration();
conf.addResource(new Path("/project/hadoop-0.20.2/conf/hdfs-site.xml"));
conf.addResource(new Path("/project/hadoop-0.20.2/conf/core-site.xml"));
conf.addResource(new Path("/project/hadoop-0.20.2/conf/mapred-site.xml"));
conf.addResource(new Path("/project/hadoop-0.20.2/src/hdfs/hdfs-default.xml"));
conf.set(UnixUserGroupInformation.UGI_PROPERTY_NAME, name); // name: the user identity string
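A FileSystem handle obtained from such a Configuration then talks to the cluster described by those files. A minimal sketch, assuming the conf built above (UnixUserGroupInformation is the Hadoop 0.20 class from org.apache.hadoop.security):

FileSystem fs = FileSystem.get(conf);            // uses fs.default.name from the loaded files
System.out.println(fs.getUri());                 // e.g. hdfs://localhost:9000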