千家信息网

HDFS基本常用命令是什么

发表于:2025-02-01 作者:千家信息网编辑
千家信息网最后更新 2025年02月01日,这篇文章主要介绍了HDFS基本常用命令是什么,具有一定借鉴价值,感兴趣的朋友可以参考下,希望大家阅读完这篇文章之后大有收获,下面让小编带着大家一起了解一下。
千家信息网最后更新 2025年02月01日HDFS基本常用命令是什么

这篇文章主要介绍了HDFS基本常用命令是什么,具有一定借鉴价值,感兴趣的朋友可以参考下,希望大家阅读完这篇文章之后大有收获,下面让小编带着大家一起了解一下。

一. 创建HDFS文件:

public class Test4CreateFile {
    /**
     * Creates a file on HDFS, writes a short byte payload into it, then
     * prints the status of the written path.
     *
     * <p>Fix over the original: both the {@code FSDataOutputStream} and the
     * {@code FileSystem} handle were leaked, and {@code listStatus} was
     * called before the stream was closed (so the write might not yet be
     * visible). try-with-resources closes both deterministically, and the
     * listing now happens after the stream is closed.
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            // NOTE(review): getBytes() uses the platform default charset;
            // kept as-is to preserve behavior, but UTF-8 would be explicit.
            byte[] buff = "Hello Hadoop HDFS".getBytes();
            try (FileSystem fs = FileSystem.get(uri, conf)) {
                Path dfs = new Path("hdfs://192.168.226.129:9000/studyhadoop");
                // Close the stream before listing so the data is flushed.
                try (FSDataOutputStream outputStream = fs.create(dfs)) {
                    outputStream.write(buff, 0, buff.length);
                }
                // listStatus on a file path yields that single file's status.
                FileStatus[] files = fs.listStatus(dfs);
                for (FileStatus file : files) {
                    System.out.println("file:  " + file.getPath());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

二:删除HDFS文件

public class Test5DeleteFile {
    /**
     * Deletes a single HDFS path (non-recursively) and prints whether the
     * deletion succeeded.
     *
     * <p>Fix over the original: the {@code FileSystem} handle was never
     * closed; try-with-resources guarantees it is released even when
     * {@code delete} throws.
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            try (FileSystem fs = FileSystem.get(uri, conf)) {
                Path delef = new Path("hdfs://192.168.226.129:9000/testhadoop1");
                // false = do not delete recursively; fails on non-empty dirs.
                boolean isDeleted = fs.delete(delef, false);
                System.out.println("isDelete: " + isDeleted);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

三:创建HDFS目录

public class Test9Mkdir {
    /**
     * Creates a directory on HDFS (including missing parents, per
     * {@code mkdirs} semantics) and reports success or failure.
     *
     * <p>Fix over the original: {@code fs.close()} was only reached on the
     * happy path and was skipped whenever an exception fired first;
     * try-with-resources closes it on every path. The three identical catch
     * blocks are collapsed into one multi-catch.
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            try (FileSystem fs = FileSystem.get(uri, conf)) {
                Path dfs = new Path("hdfs://192.168.226.129:9000/testhadoop");
                boolean isMkdirs = fs.mkdirs(dfs);
                if (isMkdirs) {
                    System.out.println(" Make Dir Successful ! ");
                } else {
                    System.out.println(" Make Dir Failure ! ");
                }
            }
        } catch (IllegalArgumentException | URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }
}

四:重命名HDFS文件

public class Test2Rename {        /**         * 重命名HDFS文件:         * */        public static void main(String[] args) {                try {                        Configuration conf = new Configuration();                        URI uri = new URI("hdfs://192.168.226.129:9000");                        FileSystem fs = FileSystem.get(uri,conf);                        Path oldpath = new Path("hdfs://192.168.226.129:9000/testhadoop");                        Path newpath = new Path("hdfs://192.168.226.129:9000/testhadoop1");                        //判断该文件是否存在                        boolean isExists = fs.exists(oldpath);                        System.out.println( "isExists: " +isExists );                        //重命名文件                        fs.rename(oldpath, newpath);                        isExists = fs.exists(newpath);                        System.out.println( "newpathisExists: " +isExists );                } catch (Exception e) {                        e.printStackTrace();                }         }}

五:上传本地文件至HDFS

public class Test3CopyFile {
    /**
     * Uploads a local file to an HDFS directory, then lists the directory's
     * contents to confirm the upload.
     *
     * <p>Fix over the original: the {@code FileSystem} handle was never
     * closed; try-with-resources releases it on all paths.
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            try (FileSystem fs = FileSystem.get(uri, conf)) {
                Path src = new Path("F:\\04-HadoopStudy\\mapreduce.txt");
                Path dst = new Path("hdfs://192.168.226.129:9000/rootdir");
                fs.copyFromLocalFile(src, dst);
                // NOTE(review): "fs.default.name" is the deprecated key
                // (modern key is "fs.defaultFS"); kept because the string is
                // part of the program's printed output.
                System.out.println("Upload " + conf.get("fs.default.name"));
                FileStatus[] files = fs.listStatus(dst);
                for (FileStatus file : files) {
                    System.out.println(file.getPath());
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}

六. 从HDFS下载文件至本地

public class Test10CopyToFile {
    /**
     * Downloads a file from HDFS to the local filesystem, then lists the
     * HDFS path's status.
     *
     * <p>Note: the local variables are confusingly named — {@code src} is
     * the LOCAL destination directory and {@code dst} is the HDFS source,
     * matching {@code copyToLocalFile(hdfsSrc, localDst)}'s argument order
     * as called. Behavior is preserved; only naming comments are added.
     *
     * <p>Fix over the original: the {@code FileSystem} handle was never
     * closed; try-with-resources releases it. The four identical catch
     * blocks are collapsed into one multi-catch ({@code FileNotFoundException}
     * is subsumed by {@code IOException}).
     */
    public static void main(String[] args) {
        try {
            Configuration conf = new Configuration();
            URI uri = new URI("hdfs://192.168.226.129:9000");
            try (FileSystem fs = FileSystem.get(uri, conf)) {
                Path src = new Path("F:\\"); // local destination directory
                Path dst = new Path("hdfs://192.168.226.129:9000/studyhadoop"); // HDFS source
                fs.copyToLocalFile(dst, src);
                // NOTE(review): "fs.default.name" is the deprecated config
                // key; kept because the string is printed output.
                System.out.println("DownLoad " + conf.get("fs.default.name"));
                FileStatus[] files = fs.listStatus(dst);
                for (FileStatus file : files) {
                    System.out.println(file.getPath());
                }
            }
        } catch (IllegalArgumentException | URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }
}

感谢你能够认真阅读完这篇文章,希望小编分享的"HDFS基本常用命令是什么"这篇文章对大家有帮助,同时也希望大家多多支持,关注行业资讯频道,更多相关知识等着你来学习!

0