/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.security.access;

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadService;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadResponse;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest;
import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.BulkLoadListener;
import org.apache.hadoop.hbase.security.SecureBulkLoadUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSHDFSUtils;
import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

import java.io.IOException;
import java.math.BigInteger;
import java.security.PrivilegedAction;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

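/*
 * Deployment sketch (illustrative; exact configuration may vary by release):
 * the endpoint is enabled by listing this class among the region
 * coprocessors in hbase-site.xml, e.g.
 *
 *   <property>
 *     <name>hbase.coprocessor.region.classes</name>
 *     <value>org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint</value>
 *   </property>
 *
 * The base staging directory used below is resolved by
 * SecureBulkLoadUtil.getBaseStagingDir(conf) from "hbase.bulkload.staging.dir".
 */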
/**
 * Coprocessor service for bulk loads in secure mode.
 * This coprocessor has to be installed as part of enabling security in HBase.
 *
 * The service addresses two issues:
 * <ol>
 * <li>Moving files in a secure filesystem where the HBase client and the
 * HBase server are different filesystem users.</li>
 * <li>Doing the move securely, assuming the filesystem is POSIX
 * compliant.</li>
 * </ol>
 *
 * The algorithm is as follows:
 * <ol>
 * <li>Create an hbase-owned base staging directory that is world
 * traversable but not listable (711).</li>
 * <li>A user writes data to its own secure output directory.</li>
 * <li>A call is made to hbase to create a secret staging directory that is
 * globally rwx (777) under the base directory, with a long random name.</li>
 * <li>The HFiles are moved into the secret staging directory as the
 * requesting user, then loaded into the region.</li>
 * </ol>
 *
 * As with delegation tokens, the strength of the security lies in the
 * length and randomness of the secret directory name.
 */
@InterfaceAudience.Private
public class SecureBulkLoadEndpoint extends SecureBulkLoadService
    implements CoprocessorService, Coprocessor {

  public static final long VERSION = 0L;

  // 320 random bits printed in radix 32 (5 bits per digit) give a
  // 64-character secret directory name.
  private static final int RANDOM_WIDTH = 320;
  private static final int RANDOM_RADIX = 32;

  private static final Log LOG = LogFactory.getLog(SecureBulkLoadEndpoint.class);

  private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");
  private final static FsPermission PERM_HIDDEN = FsPermission.valueOf("-rwx--x--x");

  private SecureRandom random;
  private FileSystem fs;
  private Configuration conf;

  // The base staging directory is executable-only (711): users can traverse
  // into their own secret staging directories but cannot list the parent.
  private Path baseStagingDir;

  private RegionCoprocessorEnvironment env;

  private UserProvider userProvider;
  @Override
  public void start(CoprocessorEnvironment env) {
    this.env = (RegionCoprocessorEnvironment)env;
    random = new SecureRandom();
    conf = env.getConfiguration();
    baseStagingDir = SecureBulkLoadUtil.getBaseStagingDir(conf);
    this.userProvider = UserProvider.instantiate(conf);

    try {
      fs = FileSystem.get(conf);
      fs.mkdirs(baseStagingDir, PERM_HIDDEN);
      fs.setPermission(baseStagingDir, PERM_HIDDEN);
      // Keep the staging directory non-empty so it never gets erased by
      // cleanup jobs.
      fs.mkdirs(new Path(baseStagingDir, "DONOTERASE"), PERM_HIDDEN);
      FileStatus status = fs.getFileStatus(baseStagingDir);
      if (status == null) {
        throw new IllegalStateException("Failed to create staging directory");
      }
      if (!status.getPermission().equals(PERM_HIDDEN)) {
        throw new IllegalStateException(
            "Directory already exists but permissions aren't set to '-rwx--x--x'");
      }
    } catch (IOException e) {
      throw new IllegalStateException("Failed to get FileSystem instance", e);
    }
  }

  @Override
  public void stop(CoprocessorEnvironment env) throws IOException {
  }

  @Override
  public void prepareBulkLoad(RpcController controller,
                              PrepareBulkLoadRequest request,
                              RpcCallback<PrepareBulkLoadResponse> done) {
    try {
      List<BulkLoadObserver> bulkLoadObservers = getBulkLoadObservers();

      if (bulkLoadObservers != null) {
        ObserverContext<RegionCoprocessorEnvironment> ctx =
            new ObserverContext<RegionCoprocessorEnvironment>();
        ctx.prepare(env);

        for (BulkLoadObserver bulkLoadObserver : bulkLoadObservers) {
          bulkLoadObserver.prePrepareBulkLoad(ctx, request);
        }
      }

      String bulkToken = createStagingDir(baseStagingDir,
          getActiveUser(), ProtobufUtil.toTableName(request.getTableName())).toString();
      done.run(PrepareBulkLoadResponse.newBuilder().setBulkToken(bulkToken).build());
    } catch (IOException e) {
      ResponseConverter.setControllerException(controller, e);
      // Invoke the callback exactly once: null response here, with the
      // exception already propagated through the controller.
      done.run(null);
    }
  }

  @Override
  public void cleanupBulkLoad(RpcController controller,
                              CleanupBulkLoadRequest request,
                              RpcCallback<CleanupBulkLoadResponse> done) {
    try {
      List<BulkLoadObserver> bulkLoadObservers = getBulkLoadObservers();

      if (bulkLoadObservers != null) {
        ObserverContext<RegionCoprocessorEnvironment> ctx =
            new ObserverContext<RegionCoprocessorEnvironment>();
        ctx.prepare(env);

        for (BulkLoadObserver bulkLoadObserver : bulkLoadObservers) {
          bulkLoadObserver.preCleanupBulkLoad(ctx, request);
        }
      }

      fs.delete(new Path(request.getBulkToken()), true);
      done.run(CleanupBulkLoadResponse.newBuilder().build());
    } catch (IOException e) {
      ResponseConverter.setControllerException(controller, e);
      // Invoke the callback exactly once, as in prepareBulkLoad above.
      done.run(null);
    }
  }
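
  // Core of the secure load: the region server performs the file moves as
  // the requesting user (ugi.doAs below). The doAs user carries the client's
  // filesystem delegation token from the request and, when security is
  // enabled, a token for the target filesystem as well, so HFiles can be
  // moved from the client's directory into staging and then into the region.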
  @Override
  public void secureBulkLoadHFiles(RpcController controller,
                                   SecureBulkLoadHFilesRequest request,
                                   RpcCallback<SecureBulkLoadHFilesResponse> done) {
    final List<Pair<byte[], String>> familyPaths = new ArrayList<Pair<byte[], String>>();
    for (ClientProtos.BulkLoadHFileRequest.FamilyPath el : request.getFamilyPathList()) {
      familyPaths.add(new Pair<byte[], String>(el.getFamily().toByteArray(), el.getPath()));
    }

    Token userToken = null;
    if (userProvider.isHadoopSecurityEnabled()) {
      userToken = new Token(request.getFsToken().getIdentifier().toByteArray(),
          request.getFsToken().getPassword().toByteArray(),
          new Text(request.getFsToken().getKind()),
          new Text(request.getFsToken().getService()));
    }
    final String bulkToken = request.getBulkToken();
    User user = getActiveUser();
    final UserGroupInformation ugi = user.getUGI();
    if (userToken != null) {
      ugi.addToken(userToken);
    } else if (userProvider.isHadoopSecurityEnabled()) {
      // A null token is allowed to pass through only in "simple" security
      // mode; with security enabled the client must supply a filesystem
      // delegation token.
      ResponseConverter.setControllerException(controller,
          new DoNotRetryIOException("User token cannot be null"));
      done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
      return;
    }

    Region region = env.getRegion();
    boolean bypass = false;
    if (region.getCoprocessorHost() != null) {
      try {
        bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
      } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
        done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
        return;
      }
    }
    boolean loaded = false;
    if (!bypass) {
      // Acquire a delegation token for the target filesystem (the region
      // server's) as well: after this point the doAs user below can hold two
      // tokens, one for the source filesystem (the requesting user's) and
      // one for the target, so it can move files between the two.
      if (userProvider.isHadoopSecurityEnabled()) {
        FsDelegationToken targetfsDelegationToken = new FsDelegationToken(userProvider, "renewer");
        try {
          targetfsDelegationToken.acquireDelegationToken(fs);
        } catch (IOException e) {
          ResponseConverter.setControllerException(controller, e);
          done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
          return;
        }
        Token<?> targetFsToken = targetfsDelegationToken.getUserToken();
        if (targetFsToken != null
            && (userToken == null || !targetFsToken.getService().equals(userToken.getService()))) {
          ugi.addToken(targetFsToken);
        }
      }

      loaded = ugi.doAs(new PrivilegedAction<Boolean>() {
        @Override
        public Boolean run() {
          FileSystem fs = null;
          try {
            Configuration conf = env.getConfiguration();
            fs = FileSystem.get(conf);
            for (Pair<byte[], String> el : familyPaths) {
              Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
              if (!fs.exists(stageFamily)) {
                fs.mkdirs(stageFamily);
                fs.setPermission(stageFamily, PERM_ALL_ACCESS);
              }
            }
            // Call bulkLoadHFiles() as the requesting user so the source
            // HFiles can be read and moved into staging with that user's
            // credentials.
            return env.getRegion().bulkLoadHFiles(familyPaths, true,
                new SecureBulkLoadListener(fs, bulkToken, conf));
          } catch (Exception e) {
            LOG.error("Failed to complete bulk load", e);
          } finally {
            if (fs != null) {
              try {
                if (!UserGroupInformation.getLoginUser().equals(ugi)) {
                  FileSystem.closeAllForUGI(ugi);
                }
              } catch (IOException e) {
                LOG.error("Failed to close FileSystem for " + ugi.getUserName(), e);
              }
            }
          }
          return false;
        }
      });
    }
    if (region.getCoprocessorHost() != null) {
      try {
        loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
      } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
        done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(false).build());
        return;
      }
    }
    done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build());
  }

  private List<BulkLoadObserver> getBulkLoadObservers() {
    List<BulkLoadObserver> coprocessorList =
        this.env.getRegion().getCoprocessorHost().findCoprocessors(BulkLoadObserver.class);

    return coprocessorList;
  }

  private Path createStagingDir(Path baseDir,
                                User user,
                                TableName tableName) throws IOException {
    String tblName = tableName.getNameAsString().replace(":", "_");
    String randomDir = user.getShortName() + "__" + tblName + "__" +
        (new BigInteger(RANDOM_WIDTH, random).toString(RANDOM_RADIX));
    return createStagingDir(baseDir, user, randomDir);
  }
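
  // Example of a generated staging directory name (hypothetical values):
  // user "alice" loading into table "ns:orders" gets
  //   {baseStagingDir}/alice__ns_orders__<64 radix-32 characters>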

  private Path createStagingDir(Path baseDir,
                                User user,
                                String randomDir) throws IOException {
    Path p = new Path(baseDir, randomDir);
    fs.mkdirs(p, PERM_ALL_ACCESS);
    fs.setPermission(p, PERM_ALL_ACCESS);
    return p;
  }

  private User getActiveUser() {
    User user = RpcServer.getRequestUser();
    if (user == null) {
      return null;
    }

    // This branch is for testing: security enabled on the cluster but HBase
    // configured for "simple" authentication.
    if (userProvider.isHadoopSecurityEnabled()
        && "simple".equalsIgnoreCase(conf.get(User.HBASE_SECURITY_CONF_KEY))) {
      return User.createUserForTesting(conf, user.getShortName(), new String[]{});
    }

    return user;
  }

  @Override
  public Service getService() {
    return this;
  }
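
  // Invoked by Region.bulkLoadHFiles() around each HFile: prepareBulkLoad()
  // moves (or, across filesystems, copies) the source file into the secret
  // staging directory, doneBulkLoad() fires after a successful load, and
  // failedBulkLoad() attempts to move a moved file back to its source.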
  private static class SecureBulkLoadListener implements BulkLoadListener {

    private FileSystem fs;
    private String stagingDir;
    private Configuration conf;

    // Source filesystem, resolved lazily from the first source path.
    private FileSystem srcFs = null;
    // Original permissions of moved files, recorded so they can be restored
    // if the load fails and the files are moved back.
    private Map<String, FsPermission> origPermissions = null;

    public SecureBulkLoadListener(FileSystem fs, String stagingDir, Configuration conf) {
      this.fs = fs;
      this.stagingDir = stagingDir;
      this.conf = conf;
      this.origPermissions = new HashMap<String, FsPermission>();
    }

    @Override
    public String prepareBulkLoad(final byte[] family, final String srcPath) throws IOException {
      Path p = new Path(srcPath);
      Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName()));
      if (srcFs == null) {
        srcFs = FileSystem.get(p.toUri(), conf);
      }

      if (!isFile(p)) {
        throw new IOException("Path does not reference a file: " + p);
      }

      // Check whether the source and target filesystems are the same. If
      // they are, the file is moved (and can be moved back on failure);
      // otherwise it is copied into the staging directory.
      if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) {
        LOG.debug("Bulk-load file " + srcPath + " is on different filesystem than " +
            "the destination filesystem. Copying file over to destination staging dir.");
        FileUtil.copy(srcFs, p, fs, stageP, false, conf);
      } else {
        LOG.debug("Moving " + p + " to " + stageP);
        FileStatus origFileStatus = fs.getFileStatus(p);
        origPermissions.put(srcPath, origFileStatus.getPermission());
        if (!fs.rename(p, stageP)) {
          throw new IOException("Failed to move HFile: " + p + " to " + stageP);
        }
      }
      fs.setPermission(stageP, PERM_ALL_ACCESS);
      return stageP.toString();
    }

    @Override
    public void doneBulkLoad(byte[] family, String srcPath) throws IOException {
      LOG.debug("Bulk Load done for: " + srcPath);
    }

    @Override
    public void failedBulkLoad(final byte[] family, final String srcPath) throws IOException {
      if (!FSHDFSUtils.isSameHdfs(conf, srcFs, fs)) {
        // The source file was copied, not moved, so there is nothing to
        // move back to the source filesystem.
        return;
      }
      Path p = new Path(srcPath);
      Path stageP = new Path(stagingDir,
          new Path(Bytes.toString(family), p.getName()));
      LOG.debug("Moving " + stageP + " back to " + p);
      if (!fs.rename(stageP, p)) {
        throw new IOException("Failed to move HFile: " + stageP + " to " + p);
      }

      // Restore the original permission recorded in prepareBulkLoad().
      if (origPermissions.containsKey(srcPath)) {
        fs.setPermission(p, origPermissions.get(srcPath));
      } else {
        LOG.warn("Can't find previous permission for path=" + srcPath);
      }
    }

    /**
     * Check if the given path references a file (not a directory and not a
     * symlink); this mainly guards against symlinked source paths.
     * @param p path to check
     * @return true if p is a file
     * @throws IOException if the status of the path cannot be fetched
     */
    private boolean isFile(Path p) throws IOException {
      FileStatus status = srcFs.getFileStatus(p);
      boolean isFile = !status.isDirectory();
      try {
        isFile = isFile && !(Boolean)Methods.call(FileStatus.class, status, "isSymlink", null, null);
      } catch (Exception e) {
        // FileStatus.isSymlink() is not available in all supported Hadoop
        // versions; if the reflective call fails, fall back on the
        // isDirectory() check alone.
      }
      return isFile;
    }
  }
}