import
java.io.IOException;
import
java.util.ArrayList;
import
java.util.List;
import
org.apache.hadoop.conf.Configuration;
import
org.apache.hadoop.fs.FileStatus;
import
org.apache.hadoop.fs.FileSystem;
import
org.apache.hadoop.fs.Path;
import
org.apache.hadoop.hdfs.DistributedFileSystem;
import
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import
org.apache.hadoop.fs.BlockLocation;
import
static
java.lang.System.out;
public class FileStatusTest {

    // Hadoop configuration; site XML resources are added in main() before use.
    private static Configuration config = new Configuration();
    // Filesystem handle for the cluster; assigned in main() (was previously
    // shadowed by a local variable and left permanently null — fixed).
    private static FileSystem hdfs = null;
    // Directory listing of the path given as args[0]; populated in main().
    private static FileStatus[] status = null;

    /**
     * Connects to HDFS, prints per-datanode statistics (hostname, DFS usage,
     * full datanode report), then lists the path given as the first
     * command-line argument and prints the block locations and lengths of
     * every file found there.
     *
     * @param args args[0] is the HDFS path to inspect
     * @throws IOException if the filesystem cannot be contacted
     */
    public static void main(String[] args) throws IOException {
        // Fail fast with a usage message instead of throwing
        // ArrayIndexOutOfBoundsException when no path is supplied.
        if (args.length < 1) {
            out.println("usage: FileStatusTest <hdfs-path>");
            return;
        }

        // File paths collected while listing; kept for potential later use.
        List<String> namelist = new ArrayList<String>();

        config.addResource("hdfs-site.xml");
        config.addResource("core-site.xml");
        out.println(config.get("dfs.namenode.name.dir"));
        out.println(config.get("fs.defaultFS"));

        // Assign the static field rather than declaring a shadowing local:
        // the original local 'FileSystem hdfs = ...' left the static field null.
        hdfs = FileSystem.get(config);
        try {
            // Datanode statistics are only available on a real HDFS cluster;
            // the cast fails (and is reported) for other FileSystem types.
            try {
                DistributedFileSystem dishdfs = (DistributedFileSystem) hdfs;
                DatanodeInfo[] nodeStats = dishdfs.getDataNodeStats();
                for (int i = 0; i < nodeStats.length; i++) {
                    out.println("hostname is " + nodeStats[i].getName());
                    out.println("dfs used is " + nodeStats[i].getDfsUsed());
                    out.println(nodeStats[i].getDatanodeReport());
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

            out.println(hdfs.getClass().getName());

            String file = args[0];
            try {
                if (!hdfs.exists(new Path(file))) {
                    return;
                }
                status = hdfs.listStatus(new Path(file));
                out.println("file num is " + status.length);
                for (FileStatus f : status) {
                    namelist.add(f.getPath().toString());
                    out.println("file name is " + f.getPath());
                    // Ask for the locations of every block spanning the whole file.
                    BlockLocation[] blks = hdfs.getFileBlockLocations(f, 0, f.getLen());
                    for (BlockLocation blk : blks) {
                        out.println("blk name is " + blk.toString());
                        out.println("length is " + blk.getLength());
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        } finally {
            // Release the filesystem handle; the original leaked it.
            if (hdfs != null) {
                hdfs.close();
            }
        }
    }
}