Common Java APIs for Hadoop HDFS


1. First, create a Maven project. Below are the dependencies the pom file needs:
<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-mapreduce-client-core</artifactId>
        <version>2.6.0-cdh5.14.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.6.0-mr1-cdh5.14.0</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/junit/junit -->
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.11</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.testng</groupId>
        <artifactId>testng</artifactId>
        <version>RELEASE</version>
    </dependency>
    <dependency>
        <groupId>org.apache.zookeeper</groupId>
        <artifactId>zookeeper</artifactId>
        <version>3.4.7</version>
    </dependency>
</dependencies>
2. Below are the common HDFS operations:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.SimpleDateFormat;

public class HDFSDemo {

    static SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    // The code shared by the operations below is extracted into helper methods;
    // there are two ways to obtain a FileSystem instance.

    /**
     * Method one: pass the NameNode URI directly.
     */
    private static FileSystem getFileSystem() throws IOException, URISyntaxException {
        // 1. Instantiate Configuration
        Configuration configuration = new Configuration();
        // 2. Obtain the FileSystem instance
        return FileSystem.get(new URI("hdfs://192.168.10.55:8020"), configuration);
    }

    /**
     * Method two: set fs.defaultFS in the Configuration.
     */
    private static FileSystem getFileSystem_2() throws IOException, URISyntaxException {
        Configuration config = new Configuration();
        config.set("fs.defaultFS", "hdfs://node01:8020");
        return FileSystem.newInstance(new URI("/"), config);
    }

    public static void main(String[] args) throws Exception {
        FileSystem fs = getFileSystem();
        FileSystem fs2 = getFileSystem_2();

        // Create a directory
        boolean mkdirs = fs.mkdirs(new Path("/hadoop002"));
        if (mkdirs) {
            System.out.println("Directory created successfully!");
        } else {
            System.out.println("Failed to create directory!");
        }

        // Delete a directory (recursively)
        boolean b = fs.delete(new Path("/hadoop001"), true);
        if (b) {
            System.out.println("Deleted successfully!");
        } else {
            System.out.println("Delete failed!");
        }

        // Rename
        boolean b1 = fs.rename(new Path("/test001"), new Path("/test100"));
        if (b1) {
            System.out.println("Renamed successfully!");
        } else {
            System.out.println("Rename failed!");
        }

        // List files and directories under the root path
        FileStatus[] fileStatuses = fs.listStatus(new Path("/"));
        for (FileStatus file : fileStatuses) {
            System.out.println(file.getPermission());     // permissions
            System.out.println(file.getGroup());          // group
            System.out.println(file.getOwner());          // owner
            System.out.println(file.getLen());            // size
            System.out.println(format.format(file.getModificationTime())); // modification time
            System.out.println(file.getReplication());    // replication
            System.out.println(file.getBlockSize());      // block size
            System.out.println(file.getPath().getName()); // file name
            System.out.println(format.format(file.getAccessTime()));       // access time
        }

        // Upload a file
        try {
            // The first argument is the local file, the second is the target path on the cluster
            fs.copyFromLocalFile(new Path("C:\\Users\\Administrator\\Desktop\\aa.txt"), new Path("/"));
        } catch (Exception e) {
            System.out.println("Upload failed");
            return;
        }
        System.out.println("Upload succeeded");

        // Download a file
        fs.copyToLocalFile(false, new Path("/aa.txt"), new Path("D:\\"), true);
        System.out.println("Download finished");

        // Create an empty file
        boolean b2 = fs.createNewFile(new Path("/zhangsanfeng.txt"));
        if (b2) {
            System.out.println("File created successfully!");
        } else {
            System.out.println("Failed to create file!");
        }

        // Close the FileSystem instances only after all operations are done
        fs.close();
        fs2.close();
    }
}
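Besides copyFromLocalFile/copyToLocalFile, HDFS files can also be written and read through streams. The following is a minimal sketch that is not part of the original post: it assumes the same NameNode address hdfs://192.168.10.55:8020, and the class name HDFSStreamDemo and the path /stream_demo.txt are made up for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.net.URI;

public class HDFSStreamDemo {
    public static void main(String[] args) throws Exception {
        // Assumes the same NameNode address as the example above
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.10.55:8020"), new Configuration());

        // Write a string to a new HDFS file through an output stream
        // (/stream_demo.txt is a hypothetical path chosen for this sketch)
        try (FSDataOutputStream out = fs.create(new Path("/stream_demo.txt"))) {
            out.write("hello hdfs".getBytes("UTF-8"));
        }

        // Read the file back and copy it to the console; close=false so that
        // System.out is left open, then close the input stream ourselves
        FSDataInputStream in = fs.open(new Path("/stream_demo.txt"));
        IOUtils.copyBytes(in, System.out, 4096, false);
        in.close();

        fs.close();
    }
}

Stream-based access is handy when the data does not exist as a local file, for example when writing records produced in memory or piping HDFS content directly into another stream.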