package eu.dnetlib.data.mdstore.manager.utils;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import eu.dnetlib.data.mdstore.manager.exceptions.MDStoreManagerException;

/**
 * Thin HDFS client used by the MDStore manager to delete paths on the configured Hadoop cluster.
 */
@Component
public class HdfsClient {

	@Value("${dhp.mdstore-manager.hadoop.cluster}")
	private String hadoopCluster;

	@Value("${dhp.mdstore-manager.hadoop.user}")
	private String hadoopUser;

	private static final Log log = LogFactory.getLog(HdfsClient.class);

	/**
	 * Recursively deletes a path on HDFS, using the configuration of the selected cluster.
	 */
	public void deletePath(final String path) throws MDStoreManagerException {

		final Configuration conf = new Configuration();

		// Impersonate the configured Hadoop user
		System.setProperty("HADOOP_USER_NAME", hadoopUser);

		// Load the cluster-specific Hadoop configuration from the classpath
		if (hadoopCluster.equalsIgnoreCase("OCEAN")) {
			conf.addResource(getClass().getResourceAsStream("/hadoop/OCEAN/core-site.xml"));
			conf.addResource(getClass().getResourceAsStream("/hadoop/OCEAN/ocean-hadoop-conf.xml"));
		} else if (hadoopCluster.equalsIgnoreCase("GARR")) {
			conf.addResource(getClass().getResourceAsStream("/hadoop/GARR/core-site.xml"));
			conf.addResource(getClass().getResourceAsStream("/hadoop/GARR/garr-hadoop-conf.xml"));
		} else {
			log.error("Invalid Hadoop cluster: " + hadoopCluster);
			throw new MDStoreManagerException("Invalid Hadoop cluster: " + hadoopCluster);
		}

		// Recursively delete the path, closing the FileSystem when done
		try (final FileSystem fs = FileSystem.get(conf)) {
			fs.delete(new Path(path), true);
			log.info("HDFS path deleted: " + path);
		} catch (IllegalArgumentException | IOException e) {
			log.error("Error deleting path: " + path, e);
			throw new MDStoreManagerException("Error deleting path: " + path, e);
		}
	}

	public String getHadoopCluster() {
		return hadoopCluster;
	}

	public void setHadoopCluster(final String hadoopCluster) {
		this.hadoopCluster = hadoopCluster;
	}

	public String getHadoopUser() {
		return hadoopUser;
	}

	public void setHadoopUser(final String hadoopUser) {
		this.hadoopUser = hadoopUser;
	}
}
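
/*
 * Example usage (a minimal sketch; in practice the bean is injected by Spring and the
 * cluster/user values come from the dhp.mdstore-manager.* properties — the literal values
 * and the path shown here are assumptions for illustration only):
 *
 *   final HdfsClient client = new HdfsClient();
 *   client.setHadoopCluster("OCEAN");          // must be OCEAN or GARR
 *   client.setHadoopUser("dnet");              // assumed user name
 *   client.deletePath("/data/mdstores/md-1");  // recursively deletes the HDFS path
 */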