java监控目录实时上传HDFS

背景描述:

为了实现对 Linux 服务器上特定目录中非结构化文件的实时监控,并将其上传到 HDFS

使用的方法

Apache的Commons-IO,来实现文件的监控功能

所需要的pom

xml 复制代码
<!-- Maven dependencies for the directory-monitoring + HDFS upload demo. -->
<dependencies>
        <!-- HDFS client API: FileSystem, Path, Configuration used in copyFile2HDFS. -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.0.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.0.0</version>
        </dependency>
        <!-- Provides FileAlterationObserver/Monitor/ListenerAdaptor for directory watching. -->
        <dependency>
            <groupId>commons-io</groupId>
            <artifactId>commons-io</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.9</version>
        </dependency>
        <dependency>
            <groupId>com.google.code.findbugs</groupId>
            <artifactId>jsr305</artifactId>
            <version>1.3.9</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.4</version>
        </dependency>
        <!-- NOTE(review): fastjson 1.2.28 is very old and has known critical
             deserialization vulnerabilities; also both fastjson and fastjson2 are
             declared — confirm whether both are really needed and consider
             upgrading/removing the legacy one. -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.28</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>com.alibaba.fastjson2</groupId>
            <artifactId>fastjson2</artifactId>
            <version>2.0.26</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/cn.hutool/hutool-all -->
        <!-- Hutool utilities; supplies the StaticLog used in FileUtil. -->
        <dependency>
            <groupId>cn.hutool</groupId>
            <artifactId>hutool-all</artifactId>
            <version>5.8.22</version>
        </dependency>

    </dependencies>
java 复制代码
/**
 * Copies a local file to HDFS, overwriting any file already present at the destination.
 *
 * @param hdfsURI  URI of the HDFS namenode, e.g. {@code hdfs://host:port}
 * @param username user to connect to HDFS as
 * @param srcPath  absolute path of the local source file
 * @param newPath  destination path on HDFS
 */
public static void copyFile2HDFS(URI hdfsURI, String username, String srcPath, String newPath) {
        Configuration conf = new Configuration();
        // try-with-resources: the original called fs.close() only on the success
        // path, leaking the FileSystem handle whenever an exception was thrown.
        try (FileSystem fs = FileSystem.get(hdfsURI, conf, username)) {
            Path src = new Path(srcPath);
            Path dst = new Path(newPath);
            if (fs.exists(dst)) {
                // remove the stale copy first so the upload always replaces it
                fs.delete(dst, true);
            }
            fs.copyFromLocalFile(src, dst);
            System.out.println("Upload Successfully!");
        } catch (Exception e) {
            // log the full stack trace through the logger instead of
            // printStackTrace() + message-only logging, which lost the cause
            StaticLog.error(e, "复制文件失败{}", e.getMessage());
        }
    }
java 复制代码
/**
 * Maps a local file to its HDFS destination path based on the file extension.
 * Video, image, audio and text files each get their own directory; anything
 * else (including files without an extension) lands directly under /data/.
 *
 * @param file local file whose name decides the target directory
 * @return the HDFS destination path including the file name
 */
public static String getHDFSPath(File file) {
        String fileName = file.getName();
        int dot = fileName.lastIndexOf('.');
        // Guard against names with no dot: the original treated a file literally
        // named "mp4" as having extension "mp4"; such files now use the default.
        String extension = dot < 0 ? "" : fileName.substring(dot + 1).toLowerCase();
        switch (extension) {
            case "mp4":
            case "avi":
            case "mov":
                return "/data/shipin/" + fileName;
            case "jpg":
            case "png":
                // NOTE(review): images go to /data/txt/ while .txt files go to
                // /data/wenjian/ — this looks swapped; behavior preserved as-is,
                // confirm the intended directory layout.
                return "/data/txt/" + fileName;
            case "m4a":
            case "wav":
                return "/data/yuyin/" + fileName;
            case "txt":
                return "/data/wenjian/" + fileName;
            default:
                return "/data/" + fileName;
        }
    }
复制代码
FileMonitorTest.java
java 复制代码
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by FernFlower decompiler)
//

package com.xxx.fileSync;

import java.util.concurrent.TimeUnit;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.io.monitor.FileAlterationMonitor;
import org.apache.commons.io.monitor.FileAlterationObserver;

/**
 * Bootstraps a commons-io directory monitor that polls {@code /opt/xxxx}
 * every 5 seconds and forwards file-system events to {@link FileListener}.
 */
public class FileMonitorTest {

    public static void main(String[] args) throws Exception {
        String absolateDir = "/opt/xxxx";
        // polling interval of the monitor thread
        long intervalTime = TimeUnit.SECONDS.toMillis(5L);
        // NOTE(review): the original also constructed an observer filtered to
        // "*.success" files and immediately discarded it, so the filter was never
        // applied; that dead code is removed. If only .success files should be
        // watched, register the filtered observer here instead.
        FileAlterationObserver observer = new FileAlterationObserver(absolateDir);
        observer.addListener(new FileListener());
        FileAlterationMonitor monitor = new FileAlterationMonitor(intervalTime, observer);
        monitor.start();
    }
}
复制代码
FileListener.java重写方法
java 复制代码
//
// Source code recreated from a .class file by IntelliJ IDEA
// (powered by FernFlower decompiler)
//

package com.xxx.fileSync;

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
import org.apache.commons.io.monitor.FileAlterationObserver;

public class FileListener extends FileAlterationListenerAdaptor {
    private static final Logger log = LoggerFactory.getLogger(FileListener.class);
    URI uri = new URI("hdfs://xxxxx:802xx0");
    String newPath = "";
    String newHDFSPath = "";
    String userName = "root";

    public FileListener() throws URISyntaxException {
    }

    public void onStart(FileAlterationObserver observer) {
        super.onStart(observer);
    }

    public void onDirectoryCreate(File directory) {
        this.newPath = "/data" + directory.getName();
        System.out.println("文件路径:" + directory.getAbsolutePath() + " 文件夹创建:" + directory.getName());
        FileUtil.newDir2HDFS(this.uri, this.userName, this.newPath);
        log.info("[Deleted Directory] : {}", directory.getAbsolutePath());
    }

    public void onDirectoryChange(File directory) {
        log.info("[Changed Directory] : {}", directory.getAbsolutePath());
    }

    public void onDirectoryDelete(File directory) {
        log.info("[Created Directory] : {}", directory.getAbsolutePath());
    }

    public void onFileCreate(File file) {
        try {
            log.info("[Created File] : {}", file.getAbsolutePath());
            this.newHDFSPath = FileUtil.getHDFSPath(file);
            this.newPath = FileUtil.getDestPath(file);
            System.out.println("监控源文件路径:" + file.toPath());
            System.out.println("监控源文件路径:" + file.getAbsolutePath() + " 目标HDFS文件创建:" + this.newHDFSPath);
            System.out.println("监控源文件路径:" + file.getAbsolutePath() + " 目标Linux文件创建:" + this.newPath);
            FileUtil.copyFile2HDFS(this.uri, this.userName, file.getAbsolutePath(), this.newHDFSPath);
            Files.copy(file.toPath(), (new File(this.newPath)).toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (Throwable var3) {
            throw var3;
        }
    }

    public void onFileChange(File file) {
        try {
            log.info("[Amended File] : {}", file.getAbsolutePath());
            this.newPath = FileUtil.getDestPath(file);
            FileUtil.copyFile2HDFS(this.uri, this.userName, file.getAbsolutePath(), this.newPath);
            Files.copy(file.toPath(), (new File(this.newPath)).toPath(), StandardCopyOption.REPLACE_EXISTING);
        } catch (Throwable var3) {
            throw var3;
        }
    }

    public void onFileDelete(File file) {
        try {
            log.info("[Deleted File] : {}", file.getAbsolutePath());
            this.newHDFSPath = FileUtil.getHDFSPath(file);
            this.newPath = FileUtil.getDestPath(file);
            FileUtil.delFile2HDFS(this.uri, this.userName, this.newHDFSPath);
            Files.delete((new File(this.newPath)).toPath());
        } catch (Throwable var3) {
            throw var3;
        }
    }

    public void onStop(FileAlterationObserver observer) {
        super.onStop(observer);
    }
}
相关推荐
day3ZY17 分钟前
清理C盘缓存,电脑缓存清理怎么一键删除,操作简单的教程
c语言·开发语言·缓存
学地理的小胖砸30 分钟前
【高分系列卫星简介】
开发语言·数码相机·算法·遥感·地理信息
码农豆豆32 分钟前
4.C++中程序中的命名空间
开发语言·c++
怀九日35 分钟前
C++(学习)2024.9.19
开发语言·c++·学习·重构·对象·
希忘auto38 分钟前
Java之线程篇四
java
蓝黑20201 小时前
Java知识点小结3:内存回收
java·gc
KookeeyLena81 小时前
如何限制任何爬虫爬取网站的图片
开发语言·c++·爬虫
yanyanwenmeng1 小时前
matlab基础
开发语言·算法·matlab
Yz98761 小时前
Hadoop里面MapReduce的序列化与Java序列化比较
java·大数据·jvm·hadoop·分布式·mapreduce·big data
凯哥Java1 小时前
优化批处理流程:自定义BatchProcessorUtils的设计与应用
java·数据库·mysql