/**
 * Minimal HDFS client demo: reads an HDFS file to stdout and copies a
 * local file into HDFS via the Hadoop {@code FileSystem} API.
 *
 * <p>Setup — copy the cluster configuration into the resources folder so it
 * lands on the classpath:
 * <pre>
 *   cp /opt/modules/hadoop-2.5.0/etc/hadoop/core-site.xml \
 *      /opt/modules/hadoop-2.5.0/etc/hadoop/hdfs-site.xml \
 *      /opt/tools/eclipse-workspace/hadoop/src/main/resources
 * </pre>
 * Load the log configuration:
 * <pre>
 *   cp /opt/modules/hadoop-2.5.0/etc/hadoop/log4j.properties \
 *      /opt/tools/eclipse-workspace/hadoop/src/main/resources/
 * </pre>
 */
public class HdfsApp {

    /**
     * Creates a {@link FileSystem} from the configuration found on the
     * classpath (core-site.xml / hdfs-site.xml).
     *
     * @return a FileSystem bound to the configured default file system
     * @throws Exception if the file system cannot be created
     */
    public static FileSystem getFileSystem() throws Exception {
        Configuration conf = new Configuration();
        return FileSystem.get(conf);
    }

    /**
     * Streams the contents of an HDFS file to standard output.
     *
     * @param filename HDFS path of the file to read
     * @throws Exception if opening or copying the stream fails; errors are
     *         propagated to the caller instead of being swallowed
     */
    public static void read(String filename) throws Exception {
        FileSystem fileSystem = getFileSystem();
        Path path = new Path(filename);
        FSDataInputStream fis = fileSystem.open(path);
        try {
            // 'false' keeps System.out open after the copy completes.
            IOUtils.copyBytes(fis, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(fis);
        }
    }

    /**
     * Copies the local file /opt/modules/hadoop-2.5.0/hdfs.input into HDFS
     * at the given destination path.
     *
     * @param writeFileName destination HDFS path to create
     * @throws Exception if either stream fails; errors are propagated to the
     *         caller instead of being swallowed
     */
    public static void write(String writeFileName) throws Exception {
        FileSystem fileSystem = getFileSystem();
        Path path = new Path(writeFileName);
        FSDataOutputStream fos = fileSystem.create(path);
        FileInputStream fis =
                new FileInputStream(new File("/opt/modules/hadoop-2.5.0/hdfs.input"));
        try {
            // 'false' because the streams are closed explicitly below.
            IOUtils.copyBytes(fis, fos, 4096, false);
        } finally {
            IOUtils.closeStream(fis);
            IOUtils.closeStream(fos);
        }
    }

    public static void main(String[] args) throws Exception {
        // String filename = "/user/chris/mapreduce/wordcount/input/wc.input";
        // read(filename);
        String writeFileName = "/user/chris/put-wc.input";
        write(writeFileName);
    }
}
Maven dependencies (add to pom.xml):
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<hadoop.version>2.5.0</hadoop.version>
</properties> <dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.10</version>
</dependency>
</dependencies>