Some approaches to cleaning up HDFS data:
- Rebalance data across DataNodes
- Clean up temporary directories and log directory files
- Clean up historical partitions of full-load (snapshot) partitioned tables
- Compress data using the LZO or ORC formats
- Clean up or archive historical cold data
- Add DataNodes to scale out horizontally

Example commands for a few of these approaches are sketched below, followed by a script that automatically removes expired files under a set of directories.
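A minimal sketch of shell commands for some of the approaches above. The database, table, partition, and path names (db.tbl, /warehouse/cold_data, /archive) are hypothetical placeholders; check the exact flags against your Hadoop and Hive versions.

# Rebalance block distribution across DataNodes
# (stop once every node is within 10% of average utilization)
hdfs balancer -threshold 10

# Drop an expired historical partition from a Hive partitioned table
hive -e "ALTER TABLE db.tbl DROP IF EXISTS PARTITION (dt='2024-01-01');"

# Rewrite a table as compressed ORC to reclaim space
hive -e "CREATE TABLE db.tbl_orc STORED AS ORC TBLPROPERTIES ('orc.compress'='ZLIB') AS SELECT * FROM db.tbl;"

# Archive cold data into a HAR file to reduce NameNode metadata pressure
hadoop archive -archiveName cold.har -p /warehouse/cold_data /archive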
#!/bin/bash
source ~/.bash_profile

# Directories to check (more can be added to the array)
yarn_log_dir=/app-logs/spark/logs
spark_log_dir=/spark-history
spark2_log_dir=/spark2-history
mr_log_dir=/mr-history/done/$(date -d "-1 days" +"%Y/%m")
array_check=($yarn_log_dir $mr_log_dir)

# Expiration threshold in days
expire_days=14

# Current timestamp (truncated to the minute)
today_timestamp=$(date -d "$(date +"%Y-%m-%d %H:%M")" +%s)

# Func: delete files under the given directory that are older than the threshold
removeOutDate(){
    # Keep only file/directory entries; skip the "Found N items" header line
    hadoop fs -ls "$1" | grep '^[-d]' > temp_list.txt
    # ls columns: permissions, replication, owner, group, size, date, time, path
    while read perms replicas user group size day hour filepath
    do
        current_file_time="$day $hour"
        current_file_timestamp=$(date -d "$current_file_time" +%s)
        if [ $(($today_timestamp - $current_file_timestamp)) -ge $(($expire_days*24*60*60)) ]; then
            echo "$day $hour $size $filepath"
            hadoop fs -rm -r -skipTrash "$filepath" > /dev/null 2>&1
        fi
    done < temp_list.txt
}

# Func: run the cleanup over every directory in the array
execute(){
    echo -e "\n\n"
    echo "$(date +'%Y-%m-%d %H:%M:%S') start to remove outdated files in hdfs"
    echo "$(date +'%Y-%m-%d %H:%M:%S') today is: $(date +'%Y-%m-%d')"
    for i in "${array_check[@]}"
    do
        echo "$(date +'%Y-%m-%d %H:%M:%S') processing filepath: $i"
        removeOutDate "$i"
        echo -e "\n"
    done
    echo "$(date +'%Y-%m-%d %H:%M:%S') remove outdated files in hdfs finished"
    echo -e "\n\n"
    rm -f temp_list.txt
}

# Start execution
execute
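To run the cleanup periodically, a crontab entry along these lines could be used; the script and log paths here are hypothetical.

# Run the cleanup every day at 02:00, appending output to a log file
0 2 * * * /opt/scripts/hdfs_clean.sh >> /var/log/hdfs_clean.log 2>&1

Note that the script deletes with -skipTrash, so removed files bypass the HDFS trash and cannot be recovered; it is worth dry-running it first by commenting out the hadoop fs -rm line and reviewing the files it echoes.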