Big Data Tutorial (7.5): Using Hadoop's Built-in RPC Framework

In the previous post the blogger covered the Hadoop client Java API; this installment walks through Hadoop's built-in RPC framework. Keep in mind that this framework was designed to give Hadoop's own internal components RPC access to one another; it does not aim to be a general-purpose open-source RPC framework in the vein of Dubbo, Spring Cloud, or HSF, so it is rarely used in ordinary web projects. For back-end projects, however, Hadoop RPC can be used entirely on its own, and no Hadoop cluster needs to be set up (see the dependency sketch below).

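To run these examples it is enough to have the Hadoop client libraries (which contain org.apache.hadoop.ipc.RPC) on the classpath. With Maven, for example, a dependency roughly like the following would do; the artifact coordinates are real, but the version shown is only an example, so use whichever release your project targets:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <!-- example version only; pick the release your project uses -->
    <version>2.8.5</version>
</dependency>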
Let's walk through a usage example.

As we saw in earlier chapters, Hadoop itself uses RPC between the namenode and the datanodes, and between the resource manager and the node managers. The following code simulates that kind of interaction.

Define the remote call interfaces

package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * ClientNamenodeProtocol.java: simulated namenode protocol interface
 * 
 * @author arron 2018-11-14 23:09:55
 */
public interface ClientNamenodeProtocol {
    /**
     * Protocol version number. The framework reads this static field, but it
     * may differ from the version the client passes in; no check is enforced.
     */
    public static final long versionID = 1L;

    public String getMetaData(String path);
}

package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * IUserLoginService.java: user login service interface
 * 
 * @author arron 2018-11-14 23:30:45
 */
public interface IUserLoginService {
    /**
     * Protocol version number.
     */
    public static final long versionID = 100L;

    public String login(String name, String passwd);

}

Implement the interfaces

package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyNameNode.java: implementation of the simulated namenode protocol interface
 * 
 * @author arron 2018-11-14 23:31:37
 */
public class MyNameNode implements ClientNamenodeProtocol {

    // Simulates one of the namenode's operations: looking up file metadata
    public String getMetaData(String path) {
        return path + ": 3 - {BLK_1,BLK_2} ....";
    }

}

package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginServiceImpl.java: user login service implementation
 * 
 * @author arron 2018-11-14 23:32:16
 */
public class UserLoginServiceImpl implements IUserLoginService {

    public String login(String name, String passwd) {
        return name + " logged in successfully...";
    }
}

Publish the remote services

package com.empire.hadoop.hadoop_rpc.service;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Builder;
import org.apache.hadoop.ipc.RPC.Server;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;
import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * PublishServiceUtil.java: utility class that publishes the RPC services
 * 
 * @author arron 2018-11-14 23:32:01
 */
public class PublishServiceUtil {

    public static void main(String[] args) throws Exception {
        // Expose ClientNamenodeProtocol on port 8888
        Builder builder = new RPC.Builder(new Configuration());
        builder.setBindAddress("localhost").setPort(8888).setProtocol(ClientNamenodeProtocol.class)
                .setInstance(new MyNameNode());

        Server server = builder.build();
        server.start();

        // Expose IUserLoginService on a second server listening on port 9999
        Builder builder2 = new RPC.Builder(new Configuration());
        builder2.setBindAddress("localhost").setPort(9999).setProtocol(IUserLoginService.class)
                .setInstance(new UserLoginServiceImpl());

        Server server2 = builder2.build();
        server2.start();
    }

}
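Note that main returns right after starting both servers, yet they keep running (the idle-connection-scanner messages in the server log further down keep appearing). If you prefer to block the main thread explicitly and allow a clean shutdown, a minimal sketch, not part of the original code, would append something like the following to the end of main, reusing the server and server2 variables from above:

        // Block until the servers are stopped; org.apache.hadoop.ipc.Server also
        // offers stop() for a clean shutdown (e.g. from a JVM shutdown hook).
        server.join();
        server2.join();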

Call the services from clients

package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyHdfsClient.java: RPC client for the simulated namenode
 * 
 * @author arron 2018-11-14 23:30:04
 */
public class MyHdfsClient {

    public static void main(String[] args) throws Exception {
        // The second argument is the client-side protocol version; as noted in
        // the interface, it is read but not checked against the server's.
        ClientNamenodeProtocol namenode = RPC.getProxy(ClientNamenodeProtocol.class, 1L,
                new InetSocketAddress("localhost", 8888), new Configuration());
        String metaData = namenode.getMetaData("/angela.mygirl");
        System.out.println(metaData);
    }

}
package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginAction.java: client for the user login service
 * 
 * @author arron 2018-11-14 23:30:24
 */
public class UserLoginAction {
    public static void main(String[] args) throws Exception {
        IUserLoginService userLoginService = RPC.getProxy(IUserLoginService.class, 100L,
                new InetSocketAddress("localhost", 9999), new Configuration());
        String login = userLoginService.login("angelababy", "1314520");
        System.out.println(login);
    }
}
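Neither client closes its proxy before the process exits, which is why the server log below reports a java.io.IOException ("An existing connection was forcibly closed by the remote host.") when the client JVM terminates. A slightly tidier sketch, again not part of the original code, would release the connection explicitly at the end of main, for example in UserLoginAction:

        String login = userLoginService.login("angelababy", "1314520");
        System.out.println(login);
        // Close the proxy's underlying IPC connection instead of relying on process exit.
        RPC.stopProxy(userLoginService);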

Run output (note: the log below was captured from a run with the servers bound to ports 8887 and 9997):

Server side:

[main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=or[email protected]27c6d458
[main] INFO org.apache.hadoop.ipc.CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 100 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
[main] DEBUG org.apache.hadoop.security.SecurityUtil - Setting hadoop.security.token.service.use_ip to true
[main] DEBUG org.apache.hadoop.ipc.Server - Server accepts auth methods:[SIMPLE]
[Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 8887
[main] DEBUG org.apache.hadoop.ipc.metrics.RpcMetrics - Initialized MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=8887}], metrics=[]}
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of received bytes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of sent bytes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Queue time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Processing time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.deferredRpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Deferred Processing time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication failures], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication successes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization failures], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization successes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcClientBackoff with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of client backoff requests], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcSlowCalls with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of Slow RPC calls], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public java.lang.String org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnectionsPerUser() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections per user], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Length of the call queue], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public long org.apache.hadoop.ipc.metrics.RpcMetrics.numDroppedConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of dropped connections], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcActivityForPort8887, Aggregate RPC metrics
[main] DEBUG org.apache.hadoop.ipc.metrics.RpcDetailedMetrics - MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed}
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.deferredRpcRates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcDetailedActivityForPort8887, Per method RPC metrics
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[GetGroups], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Renewal failures since startup], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Renewal failures since last successful login], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - UgiMetrics, User and group related metrics
[main] DEBUG org.apache.hadoop.security.authentication.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty
[main] DEBUG org.apache.hadoop.security.Groups -  Creating new Groups object
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Trying to load the custom-built native-hadoop library...
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - java.library.path=E:\Program Files\Java\jdk1.7.0_80\bin;C:\WINDOWS\Sun\Java\bin;C:\WINDOWS\system32;C:\WINDOWS;E:\Program Files\Java\jdk1.7.0_80\jre\bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin/server;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/lib/amd64;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\WiFi\bin\;C:\Program Files\Common Files\Intel\WirelessCommon\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;D:\Program Files (x86)\VanDyke Software\Clients\;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;E:\Program Files\Java\jdk1.8.0_131\bin;E:\Program Files\Java\jdk1.8.0_131\jre\bin;E:\Program Files\apache-maven-3.5.4\bin;%HADOOP_HOME%\bin;C:\Users\aaron\AppData\Local\Microsoft\WindowsApps;D:\Program Files (x86)\VanDyke Software\Clients\;;E:\sts-bundle\sts-3.9.5.RELEASE;;.
[main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[main] DEBUG org.apache.hadoop.util.PerformanceAdvisory - Falling back to shell based
[main] DEBUG org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
[main] DEBUG org.apache.hadoop.security.Groups - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol version=1 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.MyNameNode protocolClass=com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol
[IPC Server Responder] INFO org.apache.hadoop.ipc.Server - IPC Server Responder: starting
[IPC Server listener on 8887] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 8887: starting
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: starting
[main] INFO org.apache.hadoop.ipc.CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 100 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
[main] DEBUG org.apache.hadoop.ipc.Server - Server accepts auth methods:[SIMPLE]
[Socket Reader #1 for port 9997] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 9997
[main] DEBUG org.apache.hadoop.ipc.metrics.RpcMetrics - Initialized MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=9997}], metrics=[]}
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of received bytes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of sent bytes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Queue time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Processing time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.deferredRpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Deferred Processing time], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication failures], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication successes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization failures], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization successes], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcClientBackoff with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of client backoff requests], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcSlowCalls with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of Slow RPC calls], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public java.lang.String org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnectionsPerUser() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections per user], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Length of the call queue], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public long org.apache.hadoop.ipc.metrics.RpcMetrics.numDroppedConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of dropped connections], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcActivityForPort9997, Aggregate RPC metrics
[main] DEBUG org.apache.hadoop.ipc.metrics.RpcDetailedMetrics - MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed}
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.deferredRpcRates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcDetailedActivityForPort9997, Per method RPC metrics
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService version=100 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.UserLoginServiceImpl protocolClass=com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService
[IPC Server listener on 9997] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 9997: starting
[IPC Server Responder] INFO org.apache.hadoop.ipc.Server - IPC Server Responder: starting
[IPC Server handler 0 on 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 9997: starting
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server listener on 8887] DEBUG org.apache.hadoop.ipc.Server - Server connection from 127.0.0.1:58401; # active connections: 1; # queued calls: 0
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server -  got #-3
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Successfully authorized userInfo {
  effectiveUser: "aaron"
}
protocol: "com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol"

[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server -  got #0
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 for RpcKind RPC_WRITABLE
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.security.UserGroupInformation - PrivilegedAction as:aaron (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2606)
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation - getMetaData
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation - getMetaData
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - Served: getMetaData queueTime= 4 procesingTime= 0
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: responding to Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: responding to Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 Wrote 89 bytes.
[Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: readAndProcess from client 127.0.0.1:58401 threw exception [java.io.IOException: An existing connection was forcibly closed by the remote host.]
java.io.IOException: An existing connection was forcibly closed by the remote host.
	at sun.nio.ch.SocketDispatcher.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
	at sun.nio.ch.IOUtil.read(IOUtil.java:197)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:384)
	at org.apache.hadoop.ipc.Server.channelRead(Server.java:3192)
	at org.apache.hadoop.ipc.Server.access$2600(Server.java:136)
	at org.apache.hadoop.ipc.Server$Connection.readAndProcess(Server.java:1984)
	at org.apache.hadoop.ipc.Server$Listener.doRead(Server.java:1244)
	at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:1100)
	at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:1071)
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: disconnecting client 127.0.0.1:58401. Number of active connections: 0
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running

Client side:

[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of successful kerberos logins and latency (milliseconds)], type=DEFAULT, always=false, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of failed kerberos logins and latency (milliseconds)], type=DEFAULT, always=false, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[GetGroups], type=DEFAULT, always=false, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Renewal failures since startup], type=DEFAULT, always=false, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Renewal failures since last successful login], type=DEFAULT, always=false, sampleName=Ops)
[main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - UgiMetrics, User and group related metrics
[main] DEBUG org.apache.hadoop.security.SecurityUtil - Setting hadoop.security.token.service.use_ip to true
[main] DEBUG org.apache.hadoop.security.authentication.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty
[main] DEBUG org.apache.hadoop.security.Groups -  Creating new Groups object
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Trying to load the custom-built native-hadoop library...
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - java.library.path=E:\Program Files\Java\jdk1.7.0_80\bin;C:\WINDOWS\Sun\Java\bin;C:\WINDOWS\system32;C:\WINDOWS;E:\Program Files\Java\jdk1.7.0_80\jre\bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin/server;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/lib/amd64;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\WiFi\bin\;C:\Program Files\Common Files\Intel\WirelessCommon\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;D:\Program Files (x86)\VanDyke Software\Clients\;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;E:\Program Files\Java\jdk1.8.0_131\bin;E:\Program Files\Java\jdk1.8.0_131\jre\bin;E:\Program Files\apache-maven-3.5.4\bin;%HADOOP_HOME%\bin;C:\Users\aaron\AppData\Local\Microsoft\WindowsApps;D:\Program Files (x86)\VanDyke Software\Clients\;;E:\sts-bundle\sts-3.9.5.RELEASE;;.
[main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[main] DEBUG org.apache.hadoop.util.PerformanceAdvisory - Falling back to shell based
[main] DEBUG org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
[main] DEBUG org.apache.hadoop.security.Groups - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login commit
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - using local user:NTUserPrincipal: aaron
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - Using user: "NTUserPrincipal: aaron" with name aaron
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - User entry: "aaron"
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - Assuming keytab is managed externally since logged in from subject.
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - UGI loginUser:aaron (auth:SIMPLE)
[main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=or[email protected]271aec5c
[main] DEBUG org.apache.hadoop.ipc.Client - getting client out of cache: [email protected]
[main] DEBUG org.apache.hadoop.ipc.Client - The ping interval is 60000 ms.
[main] DEBUG org.apache.hadoop.ipc.Client - Connecting to localhost/127.0.0.1:8887
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron: starting, having connections 1
[IPC Parameter Sending Thread #0] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron sending #0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron got value #0
[main] DEBUG org.apache.hadoop.ipc.RPC - Call: getMetaData 207
/angela.mygirl: 3 - {BLK_1,BLK_2} ....


Summary: Hadoop's built-in RPC framework makes it easy to open a port and expose a remote service, and Hadoop's own internal services are exposed through this very framework and its protocols (for example, HDFS's ClientProtocol between clients and the namenode, or DatanodeProtocol between datanodes and the namenode). If you are interested, take a look at the protocol implementations that ship with the various Hadoop services.

Closing words: that's all for this post. If you found it helpful, please give it a like; if you're interested in the blogger's other server and big-data articles, or in the blogger himself, please follow the blog, and you're welcome to reach out at any time.