HdfsClientHandler.java

package com.primeton.damp.fileclient;

import org.apache.hadoop.fs.FileSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
/**
 * <pre>
 *
 * Created by zhaopx.
 * User: zhaopx
 * Date: 2021/4/1
 * Time: 17:55
 *
 * </pre>
 *
 * @author zhaopx
 */
public class HdfsClientHandler implements X11ClientHandler {

    private static final Logger logger = LoggerFactory.getLogger("HdfsClientHandler");

    /**
     * Base path against which operations are resolved
     */
    private final String basePath;

    /**
     * Hadoop file system
     */
    final FileSystem fs;

    public HdfsClientHandler(FileSystem fs) {
        this(fs, System.getProperty("user.name"));
    }

    public HdfsClientHandler(FileSystem fs, String basePath) {
        this.fs = fs;
        this.basePath = basePath;
    }
    @Override
    public void reconnect(Properties params) throws IOException {
        // No-op: the FileSystem handle is passed in fully connected and needs no re-establishment.
    }

    @Override
    public OutputStream writeFile(String file, boolean overwrite) throws IOException {
        // FileSystem#create accepts the overwrite flag directly, so no branch is needed.
        return fs.create(new org.apache.hadoop.fs.Path(file), overwrite);
    }
    @Override
    public InputStream readFile(String file) throws IOException {
        return fs.open(new org.apache.hadoop.fs.Path(file));
    }

    @Override
    public boolean rename(String file, String newFileName) {
        final org.apache.hadoop.fs.Path oldFile = new org.apache.hadoop.fs.Path(file);
        // The new name is resolved against the old file's parent directory.
        final org.apache.hadoop.fs.Path newFile = new org.apache.hadoop.fs.Path(oldFile.getParent(), newFileName);
        try {
            return fs.rename(oldFile, newFile);
        } catch (IOException e) {
            throw new RuntimeException("rename " + file + " error!", e);
        }
    }
    @Override
    public boolean deleteFile(String path) {
        final org.apache.hadoop.fs.Path file = new org.apache.hadoop.fs.Path(path);
        try {
            if (!fs.exists(file)) {
                return false;
            }
            // Recursive delete: directories are removed together with their contents.
            return fs.delete(file, true);
        } catch (Exception e) {
            throw new RuntimeException("delete " + path + " error!", e);
        }
    }

    @Override
    public boolean mkdir(String path) {
        return mkdirs(path);
    }

    @Override
    public boolean mkdirs(String path) {
        try {
            return fs.mkdirs(new org.apache.hadoop.fs.Path(path));
        } catch (IOException e) {
            throw new RuntimeException("mkdirs " + path + " error!", e);
        }
    }
    @Override
    public List<Path> getChildren(String ftpPath) {
        final org.apache.hadoop.fs.Path file = new org.apache.hadoop.fs.Path(ftpPath);
        try {
            if (fs.exists(file) && fs.getFileStatus(file).isDirectory()) {
                // Map each child's path, not FileStatus#toString (which dumps the whole status record).
                return Arrays.stream(fs.listStatus(file))
                        .map(it -> Paths.get(it.getPath().toUri().getPath()))
                        .collect(Collectors.toList());
            }
        } catch (Exception e) {
            logger.error("getChildren " + ftpPath + " error!", e);
        }
        return Collections.emptyList();
    }
    @Override
    public boolean exists(String path) {
        final org.apache.hadoop.fs.Path file = new org.apache.hadoop.fs.Path(path);
        try {
            return fs.exists(file);
        } catch (Exception e) {
            logger.error("exists " + path + " error!", e);
        }
        return false;
    }

    @Override
    public boolean existsDir(String path) {
        final org.apache.hadoop.fs.Path file = new org.apache.hadoop.fs.Path(path);
        try {
            return fs.exists(file) && fs.getFileStatus(file).isDirectory();
        } catch (Exception e) {
            logger.error("existsDir " + path + " error!", e);
        }
        return false;
    }

    @Override
    public boolean existsFile(String path) {
        final org.apache.hadoop.fs.Path file = new org.apache.hadoop.fs.Path(path);
        try {
            return fs.exists(file) && fs.getFileStatus(file).isFile();
        } catch (Exception e) {
            logger.error("existsFile " + path + " error!", e);
        }
        return false;
    }
    @Override
    public void close() throws IOException {
        // Intentionally empty: the FileSystem instance is supplied by, and owned by, the caller.
    }
}
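
A minimal usage sketch, assuming a reachable HDFS cluster; the fs.defaultFS URI, demo class name, and file path below are illustrative, not part of this class:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

public class HdfsClientHandlerDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative namenode address; replace with the real cluster URI.
        conf.set("fs.defaultFS", "hdfs://namenode:8020");
        FileSystem fs = FileSystem.get(conf);

        HdfsClientHandler handler = new HdfsClientHandler(fs);
        try (OutputStream out = handler.writeFile("/tmp/demo.txt", true)) {
            out.write("hello hdfs".getBytes(StandardCharsets.UTF_8));
        }
        System.out.println(handler.existsFile("/tmp/demo.txt")); // expected: true
    }
}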