Java Code Examples for Create, Delete, Update, and Query Operations on HDFS


This article shares working Java code for create, delete, update, and query (CRUD) operations on HDFS, for your reference. The details are as follows.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.compress.utils.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FileOperation {

    public static void main(String[] args) throws IOException {
        // createFile();
        // deleteFile();
        // copyFileToHdfs();
        // mkdirs();
        // delDirs();
        listDirectory();
        download();
    }

    public FileOperation() {
        // TODO Auto-generated constructor stub
    }

    // Create a file on HDFS and write a line of text into it
    public static void createFile() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        byte[] fileContentBuff = "hello hadoop world, test write file !\n".getBytes();
        Path dfs = new Path("/home/test.txt");
        FSDataOutputStream outputStream = fSystem.create(dfs);
        outputStream.write(fileContentBuff, 0, fileContentBuff.length);
        outputStream.close();
    }

    // Delete a file on HDFS (the boolean argument enables recursive deletion)
    public static void deleteFile() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        Path deleteF = new Path("/home/test.txt");
        boolean delResult = fSystem.delete(deleteF, true);
        System.out.println(delResult ? "delete succeeded" : "delete failed");
    }

    // Upload a local file to HDFS
    public static void copyFileToHdfs() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        Path src = new Path("E:\\serializationtest\\apitest.txt");
        Path dest = new Path("/home");
        fSystem.copyFromLocalFile(src, dest);
    }

    // Create a directory on HDFS
    public static void mkdirs() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        Path src = new Path("/test");
        fSystem.mkdirs(src);
    }

    // Delete a directory on HDFS recursively
    public static void delDirs() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        Path src = new Path("/test");
        fSystem.delete(src, true);
    }

    // List a directory; for each file print its replication factor, block size and block locations
    public static void listDirectory() throws IOException {
        String uri = "hdfs://alvis:9000";
        Configuration configuration = new Configuration();
        FileSystem fSystem = FileSystem.get(URI.create(uri), configuration);
        FileStatus[] fStatus = fSystem.listStatus(new Path("/output"));
        for (FileStatus status : fStatus) {
            if (status.isFile()) {
                System.out.println("file path: " + status.getPath().toString());
                System.out.println("getReplication: " + status.getReplication());
                System.out.println("getBlockSize: " + status.getBlockSize());
                BlockLocation[] blockLocations = fSystem.getFileBlockLocations(status, 0, status.getBlockSize());
                for (BlockLocation location : blockLocations) {
                    System.out.println("host name: " + location.getHosts()[0]);
                    System.out.println("block name (host:port): " + location.getNames()[0]);
                }
            } else {
                System.out.println("directory: " + status.getPath().toString());
            }
        }
    }

    // Download a file from HDFS to the local file system
    public static void download() throws IOException {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", "hdfs://alvis:9000");
        FileSystem fSystem = FileSystem.get(configuration);
        FSDataInputStream inputStream = fSystem.open(new Path("/input/wc.jar"));
        FileOutputStream outputStream = new FileOutputStream(new File("E:\\learnlife\\download\\wc.jar"));
        IOUtils.copy(inputStream, outputStream);
        System.out.println("download finished!");
    }
}

Approach:

1. Define the address (URI) of the HDFS service running on the virtual machine.

2. Obtain a Configuration object, the entry point for remote HDFS calls.

3. Get a FileSystem object for the distributed file system from that configuration.

4. Provide the target Path.

5. Invoke the desired operation on the FileSystem object (a minimal sketch of this flow follows below).
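
The following is a minimal, self-contained sketch of that five-step flow, assuming the same HDFS address (hdfs://alvis:9000) used in the class above; the class name HdfsQuickStart and the /demo path are illustrative only and not part of the original example. Wrapping the FileSystem in try-with-resources also closes the handle automatically.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsQuickStart {
    public static void main(String[] args) throws IOException {
        // Step 1: the address of the HDFS NameNode (assumed, as in the example above)
        String uri = "hdfs://alvis:9000";
        // Step 2: client-side configuration object for the remote call
        Configuration conf = new Configuration();
        // Step 3: obtain the distributed FileSystem; try-with-resources closes it when done
        try (FileSystem fs = FileSystem.get(URI.create(uri), conf)) {
            // Step 4: the target path (illustrative)
            Path dir = new Path("/demo");
            // Step 5: invoke the operation on the FileSystem object
            boolean created = fs.mkdirs(dir);
            System.out.println("mkdirs(/demo) returned: " + created);
        }
    }
}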

The above is my walkthrough of Java code for create, delete, update, and query operations on HDFS. I hope it helps; if you have any questions, please leave a comment and I will reply promptly. Many thanks for your support of the 快网idc site!

Original article: https://blog.csdn.net/qq_41395106/article/details/89036014
