The path to delete changes on every run because the output directory is named with the date. There are two ways to delete it:
From the shell:
hadoop dfs -rmr hdfs://127.0.0.1:9000/home/hadoop/temp-output-s3copy-*
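Note that hadoop dfs -rmr is deprecated on recent releases; on Hadoop 2.x and later the equivalent is (the quotes keep your local shell from trying to expand the glob itself):

hadoop fs -rm -r 'hdfs://127.0.0.1:9000/home/hadoop/temp-output-s3copy-*'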
To do it via Java code:
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.mortbay.log.Log;

public class FSDeletion {

    public static void main(String[] args) {
        try {
            // Picks up core-site.xml etc. from the classpath
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);

            // Paths returned by listStatus are fully qualified
            // (hdfs://host:port/...), so the default must include the
            // scheme for the startsWith check below to match
            String fsName = conf.get("fs.default.name", "hdfs://localhost:9000");
            String baseDir = "/home/hadoop/";
            String outputDirPattern = fsName + baseDir + "temp-output-s3copy-";

            // List everything directly under the base directory
            Path[] paths = new Path[] { new Path(baseDir) };
            FileStatus[] status = fs.listStatus(paths);
            Path[] listedPaths = FileUtil.stat2Paths(status);

            // Recursively delete every entry that matches the output prefix
            for (Path p : listedPaths) {
                if (p.toString().startsWith(outputDirPattern)) {
                    Log.info("Attempting to delete : " + p);
                    boolean result = fs.delete(p, true); // true = recursive
                    Log.info("Deleted ? : " + result);
                }
            }
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
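Incidentally, FileSystem.globStatus can expand the wildcard on HDFS directly, which avoids listing the parent directory and filtering by prefix. A minimal sketch, reusing the same imports and FileSystem handle as above:

// globStatus expands the pattern on HDFS itself; with a glob in the
// pattern it returns an empty array (not null) when nothing matches
FileStatus[] matches = fs.globStatus(new Path(baseDir + "temp-output-s3copy-*"));
for (Path p : FileUtil.stat2Paths(matches)) {
    Log.info("Deleting " + p + " : " + fs.delete(p, true)); // true = recursive
}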