/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.util;

import java.io.DataInput;
import java.io.IOException;

import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.UserGroupInformation;

import com.google.protobuf.ByteString;

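/**
 * Static protobuf-related helpers for Hadoop IPC: varint decoding, and
 * construction of the IPC connection context and RPC request header.
 */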
public abstract class ProtoUtil {

  /**
   * Read a variable-length integer using the same varint encoding as
   * Protocol Buffers.
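   *
   * <p>For example, a caller might read a varint length prefix followed by
   * that many payload bytes (a sketch; {@code in} is any {@link DataInput}):
   * <pre>{@code
   * int length = ProtoUtil.readRawVarint32(in);
   * byte[] payload = new byte[length];
   * in.readFully(payload);
   * }</pre>
   *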
   * @param in the input stream to read from
   * @return the integer
   * @throws IOException if the varint is malformed or the stream hits EOF
   */
  public static int readRawVarint32(DataInput in) throws IOException {
    byte tmp = in.readByte();
    if (tmp >= 0) {
      return tmp;
    }
    int result = tmp & 0x7f;
    if ((tmp = in.readByte()) >= 0) {
      result |= tmp << 7;
    } else {
      result |= (tmp & 0x7f) << 7;
      if ((tmp = in.readByte()) >= 0) {
        result |= tmp << 14;
      } else {
        result |= (tmp & 0x7f) << 14;
        if ((tmp = in.readByte()) >= 0) {
          result |= tmp << 21;
        } else {
          result |= (tmp & 0x7f) << 21;
          result |= (tmp = in.readByte()) << 28;
          if (tmp < 0) {
            // Discard upper 32 bits.
            for (int i = 0; i < 5; i++) {
              if (in.readByte() >= 0) {
                return result;
              }
            }
            throw new IOException("Malformed varint");
          }
        }
      }
    }
    return result;
  }

  /**
   * Create the connection context using the same logic as the old
   * Writable-based connection context: the effective and real users are set
   * according to the auth method.
   *
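   * <p>For example, an RPC client might build the context like this (a
   * sketch; {@code protocolName} is the protocol class name negotiated for
   * the connection):
   * <pre>{@code
   * IpcConnectionContextProto context = ProtoUtil.makeIpcConnectionContext(
   *     protocolName, UserGroupInformation.getCurrentUser(),
   *     AuthMethod.KERBEROS);
   * }</pre>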
   */
  public static IpcConnectionContextProto makeIpcConnectionContext(
      final String protocol,
      final UserGroupInformation ugi, final AuthMethod authMethod) {
    IpcConnectionContextProto.Builder result = IpcConnectionContextProto.newBuilder();
    if (protocol != null) {
      result.setProtocol(protocol);
    }
    UserInformationProto.Builder ugiProto = UserInformationProto.newBuilder();
    if (ugi != null) {
      /*
       * In the connection context we send only additional user info that
       * is not derived from the authentication done during connection setup.
       */
      if (authMethod == AuthMethod.KERBEROS) {
        // Real user was established as part of the connection.
        // Send effective user only.
        ugiProto.setEffectiveUser(ugi.getUserName());
      } else if (authMethod == AuthMethod.TOKEN) {
        // With token, the connection itself establishes
        // both real and effective user. Hence send none in header.
      } else {  // Simple authentication
        // No user info is established as part of the connection.
        // Send both effective user and real user
        ugiProto.setEffectiveUser(ugi.getUserName());
        if (ugi.getRealUser() != null) {
          ugiProto.setRealUser(ugi.getRealUser().getUserName());
        }
      }
    }
    result.setUserInfo(ugiProto);
    return result.build();
  }

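  /**
   * Extract the {@link UserGroupInformation} from a connection context, or
   * return null if the context carries no user info.
   */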
  public static UserGroupInformation getUgi(IpcConnectionContextProto context) {
    if (context.hasUserInfo()) {
      UserInformationProto userInfo = context.getUserInfo();
      return getUgi(userInfo);
    } else {
      return null;
    }
  }

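  /**
   * Build a {@link UserGroupInformation} from the given user info: a proxy
   * user when both effective and real users are present, a remote user when
   * only the effective user is present, or null when no effective user is set.
   */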
  public static UserGroupInformation getUgi(UserInformationProto userInfo) {
    UserGroupInformation ugi = null;
    String effectiveUser = userInfo.hasEffectiveUser() ? userInfo
        .getEffectiveUser() : null;
    String realUser = userInfo.hasRealUser() ? userInfo.getRealUser() : null;
    if (effectiveUser != null) {
      if (realUser != null) {
        UserGroupInformation realUserUgi = UserGroupInformation
            .createRemoteUser(realUser);
        ugi = UserGroupInformation
            .createProxyUser(effectiveUser, realUserUgi);
      } else {
        ugi = UserGroupInformation.createRemoteUser(effectiveUser);
      }
    }
    return ugi;
  }

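  /**
   * Convert an {@link RPC.RpcKind} to its protobuf representation, or return
   * null for an unrecognized kind.
   */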
  static RpcKindProto convert(RPC.RpcKind kind) {
    switch (kind) {
    case RPC_BUILTIN: return RpcKindProto.RPC_BUILTIN;
    case RPC_WRITABLE: return RpcKindProto.RPC_WRITABLE;
    case RPC_PROTOCOL_BUFFER: return RpcKindProto.RPC_PROTOCOL_BUFFER;
    }
    return null;
  }

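  /**
   * Convert a {@link RpcKindProto} to the corresponding {@link RPC.RpcKind},
   * or return null for an unrecognized kind.
   */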
  public static RPC.RpcKind convert(RpcKindProto kind) {
    switch (kind) {
    case RPC_BUILTIN: return RPC.RpcKind.RPC_BUILTIN;
    case RPC_WRITABLE: return RPC.RpcKind.RPC_WRITABLE;
    case RPC_PROTOCOL_BUFFER: return RPC.RpcKind.RPC_PROTOCOL_BUFFER;
    }
    return null;
  }

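  /**
   * Build the RPC request header that precedes each call.
   *
   * <p>For example (a sketch; {@code callId} and {@code clientId}, a 16-byte
   * client UUID, are normally managed by the RPC client):
   * <pre>{@code
   * RpcRequestHeaderProto header = ProtoUtil.makeRpcRequestHeader(
   *     RPC.RpcKind.RPC_PROTOCOL_BUFFER,
   *     RpcRequestHeaderProto.OperationProto.RPC_FINAL_PACKET,
   *     callId, 0, clientId);
   * }</pre>
   */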
  public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
      RpcRequestHeaderProto.OperationProto operation, int callId,
      int retryCount, byte[] uuid) {
    RpcRequestHeaderProto.Builder result = RpcRequestHeaderProto.newBuilder();
    result.setRpcKind(convert(rpcKind)).setRpcOp(operation).setCallId(callId)
        .setRetryCount(retryCount).setClientId(ByteString.copyFrom(uuid));
    return result.build();
  }
}