這期內容當中小編將會給大家帶來有關hadoop中怎么將文件上傳到指定datanode,文章內容豐富且以專業的角度為大家分析和敘述,閱讀完這篇文章希望大家可以有所收獲。

package hgs.dfsclient.test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsCreateModes;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.protocol.*;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.List;
import java.util.Random;
public class MainTest {
/**
 * Demo entry point: creates a file on HDFS with a favored datanode and then
 * prints the datanode locations of its blocks.
 *
 * @throws IOException        on any HDFS or local-config I/O failure
 * @throws URISyntaxException if the hard-coded namenode URI is malformed
 */
public static void main(String[] args) throws IOException, URISyntaxException {
    Configuration conf = new Configuration();
    // Load cluster settings from local copies of the Hadoop config files.
    conf.addResource(new File("D://hdfsconf/core-site.xml").toURI().toURL());
    conf.addResource(new File("D://hdfsconf/hdfs-site.xml").toURI().toURL());
    String url = conf.get("fs.defaultFS");
    System.out.println(url);
    // Favored datanode: block replicas of the new file are placed here when possible.
    InetSocketAddress[] favor = {new InetSocketAddress("192.168.0.191", 50012)};
    // try-with-resources: the client (and its RPC connections) is closed even if
    // the upload or the listing throws. The original leaked it on failure and
    // also declared an unused local `Path path`, removed here.
    try (DFSClient client = new DFSClient(new URI("hdfs://192.168.0.191:9000/"), conf)) {
        createAndFillFile(client, conf, favor, "/user/test.txt4");
        listBlockLocation(client, conf, "/user/test.txt4", true);
    }
}
// Transfer data to the specified (favored) datanodes.
/**
 * Creates {@code src} on HDFS and fills it with pseudo-random bytes
 * (100000 writes of a 9 KiB buffer).
 *
 * @param client open HDFS client used to create the file
 * @param conf   configuration, consulted for the file-creation umask
 * @param favor  favored datanodes for block placement — NOTE(review): placement
 *               is best-effort, not a guarantee; verify with the block listing
 * @param src    absolute HDFS path of the file to create
 * @throws IOException if the create or any write fails
 */
public static void createAndFillFile(DFSClient client, Configuration conf,
        InetSocketAddress[] favor, String src) throws IOException {
    // try-with-resources: the output stream (and its file lease) is released
    // even if a write throws. The original only closed on the success path.
    try (DFSOutputStream dfsOutputStream = client.create(src,
            FsCreateModes.applyUMask(FsPermission.getFileDefault(), FsPermission.getUMask(conf)),
            EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
            true,              // createParent
            (short) 1,         // replication factor
            32 * 1024 * 1024,  // block size
            null, 8192, null,
            favor,             // favored datanode addresses
            null)) {
        byte[] b = new byte[1024 * 3 * 3];
        for (int i = 0; i < 100000; i++) {
            fileByte(b);
            dfsOutputStream.write(b);
        }
    }
}
static Random rand = new Random(System.nanoTime());
/**
 * Fills the supplied buffer with pseudo-random bytes.
 * A {@code null} buffer is silently ignored.
 *
 * @param b buffer to overwrite in place; may be {@code null}
 */
public static void fileByte(byte[] b) {
    if (b == null) {
        return; // nothing to fill
    }
    rand.nextBytes(b);
}
// Print the block locations (datanode addresses) of the given path.
/**
 * Lists {@code src} and prints, for every block of every returned entry,
 * the IP address and data-transfer port of each datanode holding a replica.
 *
 * @param client       open HDFS client
 * @param conf         configuration (unused; kept for interface compatibility)
 * @param src          HDFS path to list
 * @param needLocation must be {@code true} so the listing carries block locations
 * @throws IOException if the namenode RPC fails
 */
public static void listBlockLocation(DFSClient client, Configuration conf, String src,
        boolean needLocation) throws IOException {
    DirectoryListing directoryListing = client.listPaths(src, HdfsFileStatus.EMPTY_NAME,
            needLocation);
    HdfsFileStatus[] fileStatuses = directoryListing.getPartialListing();
    for (HdfsFileStatus status : fileStatuses) {
        // Safe only because needLocation=true makes listPaths return located statuses.
        HdfsLocatedFileStatus hl = (HdfsLocatedFileStatus) status;
        LocatedBlocks locatedBlocks = hl.getLocatedBlocks();
        for (LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
            for (DatanodeInfo li : lb.getLocations()) {
                // BUG FIX: the original concatenated the whole DatanodeInfo after
                // "--port:"; print the actual data-transfer port instead.
                System.out.println("addr:" + li.getIpAddr() + "--port:" + li.getXferPort());
            }
        }
    }
}
}
上述就是小編為大家分享的hadoop中怎么將文件上傳到指定datanode了,如果剛好有類似的疑惑,不妨參照上述分析進行理解。如果想知道更多相關知識,歡迎關注創新互聯-成都網站建設公司行業資訊頻道。
網站名稱:hadoop中怎么將文件上傳到指定datanode-創新互聯
URL標題:http://chinadenli.net/article4/cdcooe.html
成都網站建設公司_創新互聯,為您提供網站維護、品牌網站建設、域名注冊、網頁設計公司、關鍵詞優化、網站營銷
聲明:本網站發布的內容(圖片、視頻和文字)以用戶投稿、用戶轉載內容為主,如果涉及侵權請盡快告知,我們將會在第一時間刪除。文章觀點不代表本網站立場,如需處理請聯系客服。電話:028-86922220;郵箱:631063699@qq.com。內容未經允許不得轉載,或轉載時需注明來源: 創新互聯
猜你還喜歡下面的內容