Application systems are often required to keep file records such as backups of configuration changes, log files, and backup archives. These historical files rarely need to be kept forever: compliance and business rules usually define a retention period, after which the files can be deleted to free up space and speed up search and retrieval.
This cleanup can be implemented as a scheduled task: classify the files that need to be purged and clean each class according to its own lifecycle requirement (a scheduling sketch follows the listing below).
In the example below, folders under the FTP user directories are removed after 15 days, backups of the FileZilla server configuration file users.xml are removed after 5 days, and temporary upload directories are removed after 8 hours.
```javascript
const fs = require('fs');
const path = require('path');
const log4js = require('log4js');

// Daily-rotated log file for the cleanup task; keep 14 days of history.
log4js.configure({
  appenders: {
    Clean: {
      type: 'dateFile',
      filename: 'logs/clean',
      pattern: 'yyyy-MM-dd.log',
      alwaysIncludePattern: true,
      numBackups: 14,
      layout: { type: 'pattern', pattern: '%d %p %c %m%n' }
    }
  },
  categories: { default: { appenders: ['Clean'], level: 'info' } }
});
const logger = log4js.getLogger('Clean');
const uploadlifetime = 8;  // 8 hours
const ftplifetime = 15;    // 15 days
const fzlifetime = 5;      // 5 days
const uploads = ['e:/rdfile/upload/dest'];  // temporary upload directories
const ftps = ['e:/rdfile/ftp1root', 'e:/rdfile/ftp2root', 'e:/rdfile/ftp3root', 'e:/rdfile/ftp4root'];  // FTP root directories
const fzs = ['c:/FZ1', 'c:/FZ2', 'c:/FZ3', 'c:/FZ4'];  // FileZilla server configuration directories
// Recursively delete a path: files are unlinked, directories are emptied and then
// removed. (On Node.js 14.14+, fs.rm(dir, { recursive: true, force: true }) can
// replace this helper.)
function rmdirp(dir) {
  return new Promise((resolve, reject) => {
    fs.stat(dir, (err, status) => {
      if (err) return reject(err);
      if (status.isDirectory()) {
        fs.readdir(dir, (err, files) => {
          if (err) return reject(err);
          let res = files.map((item) => rmdirp(path.join(dir, item)));
          Promise.all(res).then(() => {
            fs.rmdir(dir, err => { if (err) return reject(err); resolve(); });
          }, reject);
        });
      } else {
        fs.unlink(dir, err => { if (err) return reject(err); resolve(); });
      }
    });
  });
}
// Remove temporary upload directories older than uploadlifetime hours.
function cleanupload(targetPath) {
  return new Promise((resolve, reject) => {
    logger.info(targetPath + ' clean start');
    fs.readdir(targetPath, (err, subdirs) => {
      if (err) return reject(err);
      // Wrap each entry in its own promise so Promise.all really waits for the deletions.
      let res = subdirs.map((item) => new Promise((res1, rej1) => {
        let fullpath = targetPath + '/' + item;
        fs.stat(fullpath, (err, sds) => {
          if (err) return rej1(err);
          let sdsage = (now - sds.birthtimeMs) / 1000 / 3600; // age in hours
          if (sdsage > uploadlifetime) { rmdirp(fullpath).then(res1, rej1); }
          else { res1(); }
        });
      }));
      Promise.all(res).then(() => resolve(), reject);
    });
  });
}
// Remove entries under each FTP user directory that are older than ftplifetime days.
function cleanftp(targetPath) {
  return new Promise((resolve, reject) => {
    logger.info(targetPath + ' clean start');
    fs.readdir(targetPath, (err, userdirs) => {
      if (err) return reject(err);
      let res = userdirs.map((userdir) => new Promise((res1, rej1) => {
        let stat = fs.statSync(targetPath + '/' + userdir);
        if (!stat.isDirectory()) return res1(); // skip plain files at the top level
        fs.readdir(targetPath + '/' + userdir, (err1, usersubdirs) => {
          if (err1) return rej1(err1);
          let res2 = usersubdirs.map((usersubdir) => new Promise((resolveEntry, rejectEntry) => {
            let fullpath = targetPath + '/' + userdir + '/' + usersubdir;
            fs.stat(fullpath, (err2, sds) => {
              if (err2) return rejectEntry(err2);
              let sdsage = (now - sds.birthtimeMs) / 1000 / 3600 / 24; // age in days
              if (sdsage > ftplifetime) { rmdirp(fullpath).then(resolveEntry, rejectEntry); }
              else { resolveEntry(); }
            });
          }));
          Promise.all(res2).then(() => res1(), rej1);
        });
      }));
      Promise.all(res).then(() => resolve(), reject);
    });
  });
}
// Remove dated backups of the FileZilla users.xml (files named users.20xx...) older than fzlifetime days.
function cleanfz(targetPath) {
  return new Promise((resolve, reject) => {
    logger.info(targetPath + ' clean start');
    fs.readdir(targetPath, (err, items) => {
      if (err) return reject(err);
      let res = items.filter(itm => itm.startsWith('users.20')).map((item) => new Promise((res1, rej1) => {
        let fullpath = targetPath + '/' + item;
        fs.stat(fullpath, (err, sds) => {
          if (err) return rej1(err);
          let sdsage = (now - sds.birthtimeMs) / 1000 / 3600 / 24; // age in days
          if (sdsage > fzlifetime) { fs.unlink(fullpath, err => { if (err) return rej1(err); res1(); }); }
          else { res1(); }
        });
      }));
      Promise.all(res).then(() => resolve(), reject);
    });
  });
}
const now = Date.now(); // reference timestamp for all age calculations

uploads.forEach(item => {
  cleanupload(item).then(() => { logger.info(item + ' clean done'); },
                          err => { logger.error(item + ' clean failed: ' + err); });
});
ftps.forEach(item => {
  cleanftp(item).then(() => { logger.info(item + ' clean done'); },
                      err => { logger.error(item + ' clean failed: ' + err); });
});
fzs.forEach(item => {
  cleanfz(item).then(() => { logger.info(item + ' clean done'); },
                     err => { logger.error(item + ' clean failed: ' + err); });
});
```