Accessing Files on HDFS with the Java API

1. Revisiting the configuration file core-site.xml

Before reading or writing HDFS files from a Java client, the configuration file hadoop-0.20.2/conf/core-site.xml deserves a close look. This is where I was badly burned at first: the client simply could not connect to HDFS, and files could neither be created nor read.

<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

  <!-- global properties -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/zhangzk/hadoop</value>
    <description>A base for other temporary directories.</description>
  </property>

  <!-- file system properties -->
  <property>
    <name>fs.default.name</name>
    <value>hdfs://linux-zzk-113:9000</value>
  </property>

</configuration>


The hadoop.tmp.dir property sets the base directory for Hadoop's local storage: on the namenode this is where the filesystem metadata is kept (by default under ${hadoop.tmp.dir}/dfs/name), while on each datanode it is where the actual block data lives (by default under ${hadoop.tmp.dir}/dfs/data).
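Those defaults are derived through Configuration's ${...} variable expansion. A small sketch of how that expansion behaves (dfs.name.dir is the 0.20-era property name; the values here just mirror the config above):

Configuration conf = new Configuration();
conf.set("hadoop.tmp.dir", "/home/zhangzk/hadoop");
// The built-in default for the namenode's metadata directory has this shape.
conf.set("dfs.name.dir", "${hadoop.tmp.dir}/dfs/name");
// get() expands the ${...} reference: prints /home/zhangzk/hadoop/dfs/name
System.out.println(conf.get("dfs.name.dir"));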

The fs.default.name property holds the namenode's address (hostname or IP) and port; its default value is file:///. A Java API client must use exactly the URL configured here to connect to HDFS, and the datanodes use the same URL to reach the namenode.
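As a quick connectivity check (a minimal sketch; linux-zzk-113:9000 comes from the core-site.xml above and must match your cluster), a client can either pass a full hdfs:// URI to FileSystem.get() or set fs.default.name on its Configuration:

Configuration conf = new Configuration();
// Must match fs.default.name on the cluster; if it is left unset the
// client falls back to the local file system (file:///).
conf.set("fs.default.name", "hdfs://linux-zzk-113:9000");
FileSystem fs = FileSystem.get(conf);
System.out.println(fs.exists(new Path("/"))); // prints true when the connection works
fs.close();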

2. Accessing files and directories on HDFS with the Java API

package com.demo.hdfs;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

/**
 * @author zhangzk
 */

public class FileCopyToHdfs {

    public static void main(String[] args) {
        try {
            //uploadToHdfs();
            //deleteFromHdfs();
            //getDirectoryFromHdfs();
            appendToHdfs();
            readFromHdfs();
            // Reached only when no exception was thrown.
            System.out.println("SUCCESS");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Upload a local file to HDFS. */
    private static void uploadToHdfs() throws FileNotFoundException, IOException {
        String localSrc = "d://qq.txt";
        String dst = "hdfs://192.168.0.113:9000/user/zhangzk/qq.txt";
        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // The Progressable callback prints a dot each time a chunk of data
        // has been flushed to the cluster.
        OutputStream out = fs.create(new Path(dst), new Progressable() {
            public void progress() {
                System.out.print(".");
            }
        });
        // Copy the stream; the final 'true' closes both streams when done.
        IOUtils.copyBytes(in, out, 4096, true);
    }

    /** Read a file from HDFS and save it to the local disk. */
    private static void readFromHdfs() throws FileNotFoundException, IOException {
        String dst = "hdfs://192.168.0.113:9000/user/zhangzk/qq.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FSDataInputStream hdfsInStream = fs.open(new Path(dst));
        OutputStream out = new FileOutputStream("d:/qq-hdfs.txt");
        byte[] ioBuffer = new byte[1024];
        int readLen;
        // Copy until read() signals end-of-file by returning -1.
        while ((readLen = hdfsInStream.read(ioBuffer)) != -1) {
            out.write(ioBuffer, 0, readLen);
        }
        out.close();
        hdfsInStream.close();
        fs.close();
    }

    /**
     * Append content to the end of a file on HDFS.
     * Note: the cluster must have append enabled by adding
     * <property><name>dfs.support.append</name><value>true</value></property>
     * to hdfs-site.xml.
     */
    private static void appendToHdfs() throws FileNotFoundException, IOException {
        String dst = "hdfs://192.168.0.113:9000/user/zhangzk/qq.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FSDataOutputStream out = fs.append(new Path(dst));
        // Write the payload once and close the stream.
        byte[] payload = "zhangzk add by hdfs java api".getBytes();
        out.write(payload, 0, payload.length);
        out.close();
        fs.close();
    }

    /** Delete a file from HDFS. */
    private static void deleteFromHdfs() throws FileNotFoundException, IOException {
        String dst = "hdfs://192.168.0.113:9000/user/zhangzk/qq-bak.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // deleteOnExit() only schedules the deletion; the file is actually
        // removed when the FileSystem is closed below. Use
        // fs.delete(new Path(dst), false) to delete immediately.
        fs.deleteOnExit(new Path(dst));
        fs.close();
    }

    /** List the files and directories under a path on HDFS. */
    private static void getDirectoryFromHdfs() throws FileNotFoundException, IOException {
        String dst = "hdfs://192.168.0.113:9000/user/zhangzk";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FileStatus[] fileList = fs.listStatus(new Path(dst));
        for (FileStatus file : fileList) {
            System.out.println("name:" + file.getPath().getName()
                    + "\t\tsize:" + file.getLen());
        }
        fs.close();
    }
}
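As the comment on appendToHdfs() says, appends only succeed when the cluster allows them. The same flag can also be set on the client's Configuration (a sketch; dfs.support.append is the 0.20-era property name, and the server-side hdfs-site.xml setting is still required):

Configuration conf = new Configuration();
// Client-side flag; the namenode and datanodes must also have
// dfs.support.append enabled, or the append() call will be rejected.
conf.setBoolean("dfs.support.append", true);
FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.0.113:9000/"), conf);
FSDataOutputStream out = fs.append(new Path("/user/zhangzk/qq.txt"));
out.write("more text".getBytes());
out.close();
fs.close();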
 


Note: append support varies considerably across Hadoop versions. In the 0.19/0.20 line the implementation was considered unreliable and stays disabled unless dfs.support.append is set, and the 0.21 release reworked it (HDFS-265). Check the release notes of your version before relying on append.
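Where append is unavailable or untrustworthy, a common workaround is to rewrite the file instead: copy the old content plus the new bytes to a temporary path, then swap it into place. A minimal sketch (the helper name and the .tmp suffix are illustrative, and the delete-then-rename step is not atomic):

    private static void appendByRewrite(FileSystem fs, Path file, byte[] extra) throws IOException {
        Path tmp = new Path(file.getParent(), file.getName() + ".tmp");
        FSDataInputStream in = fs.open(file);
        FSDataOutputStream out = fs.create(tmp, true); // overwrite a stale temp file if present
        IOUtils.copyBytes(in, out, 4096, false);       // copy the existing content, keep streams open
        out.write(extra);                              // then add the new bytes
        in.close();
        out.close();
        fs.delete(file, false);                        // not atomic: remove the old file...
        fs.rename(tmp, file);                          // ...and move the rewritten copy into place
    }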