測試HDFS讀性能-讀到本地緩存,不寫硬盤的性能

如果測試讀完存到本地文件的話,直接用fs.copyToLocalFile()方法即可,但是如果測試讀到緩存的性能,則需要用到FSDataInputStream

上代碼:

/**
 * @ProjectName: Hadoop預研平臺
 */
package com.hikvision.hdfs.test.performance;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;


/**
 * <p>Measures HDFS sequential-read throughput when reading into an in-memory
 * buffer only (nothing is written to local disk).</p>
 *
 * @author 2013-1-10 5:05:36 PM
 * @version V1.0
 * @modificationHistory=== major logic/functional change log ===
 */
public class TestReadPerformence {
	
	/**
	 * Reads an HDFS file sequentially into a reusable byte buffer and prints
	 * total bytes read, elapsed milliseconds, and throughput in MB/s.
	 *
	 * @param args args[0] (optional): HDFS file path;
	 *             args[1] (optional): per-read buffer size in MB (default 1)
	 * @throws IOException if connecting to HDFS or reading the file fails
	 */
	public static void main(String[] args) throws IOException {
		Configuration conf = HBaseConfiguration.create();
		conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
		// NOTE: "fs.default.name" is deprecated in favor of "fs.defaultFS";
		// kept as-is for compatibility with the Hadoop version in use.
		conf.set(/*"fs.defaultFS"*/"fs.default.name","hdfs://node1");
		FileSystem hdfs = FileSystem.get(conf);
		
		// Default test file (~1 GB) used when no path argument is supplied.
		String fileName = "/hbase/testscan/5f525e5fc37da7f1fc0c42b4a5a3be0b/f1/2820765239803238635";//1g
		if (args.length >= 1) {
			fileName = args[0];
		}
		int mNumber = 1;
		// was: args.length == 2 — that silently dropped the size argument
		// whenever extra arguments were passed.
		if (args.length >= 2) {
			mNumber = Integer.parseInt(args[1]);
		}
		
		byte[] buffer = new byte[mNumber * 1024 * 1024];
		long total = 0;
		long start = System.currentTimeMillis();
		// try-with-resources guarantees the stream is closed; the original
		// leaked the FSDataInputStream.
		try (FSDataInputStream dis = hdfs.open(new Path(fileName))) {
			int read;
			while ((read = dis.read(buffer)) > 0) {
				total += read;
			}
		}
		long stop = System.currentTimeMillis();
		long use = stop - start;
		// Guard against division by zero for very small or fully cached reads.
		double rate = use > 0 ? 1000.0 * total / (use * 1024.0 * 1024.0) : 0.0;
		System.out.println("total: " + total +". use: " + use + ". rate:M/s:" + rate );		
	}
}


 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章