├── .classpath
├── .gitignore
├── .project
├── .settings
│   ├── org.eclipse.core.resources.prefs
│   ├── org.eclipse.jdt.core.prefs
│   └── org.eclipse.m2e.core.prefs
├── README.txt
├── pom.xml
└── src
    ├── main
    │   ├── java
    │   │   ├── com
    │   │   │   └── catt
    │   │   │       └── httpfs
    │   │   │           └── client
    │   │   │               ├── httpclient
    │   │   │               │   ├── HttpFSClient.java
    │   │   │               │   └── HttpFSUtils.java
    │   │   │               └── utils
    │   │   │                   └── HttpFSConf.java
    │   │   └── org
    │   │       └── apache
    │   │           └── hadoop
    │   │               └── fs
    │   │                   └── http
    │   │                       └── client
    │   │                           ├── HttpFSFileSystem.java
    │   │                           ├── HttpKerberosAuthenticator.java
    │   │                           └── HttpPseudoAuthenticator.java
    │   └── resources
    │       └── httpfs.properties
    └── test
        └── java
            ├── com
            │   └── catt
            │       └── httpfs
            │           └── client
            │               └── httpclient
            │                   └── Demo.java
            └── org
                └── apache
                    └── hadoop
                        └── fs
                            └── http
                                └── client
                                    └── Demo.java
/.classpath:
--------------------------------------------------------------------------------
 1 | (XML content omitted)
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java:
--------------------------------------------------------------------------------
 186 | * Convenience method that creates a HttpURLConnection for the
 187 | * HttpFSServer file system operations.
188 | *
 189 | * This method performs authentication and injects any needed credentials
190 | * via the {@link #getConnection(URL, String)} method
191 | *
192 | * @param method
193 | * the HTTP method.
194 | * @param params
195 | * the query string parameters.
196 | * @param path
197 | * the file path
198 | * @param makeQualified
199 | * if the path should be 'makeQualified'
200 | *
 201 | * @return a HttpURLConnection for the HttpFSServer server,
202 | * authenticated and ready to use for the specified path and file
203 | * system operation.
204 | *
205 | * @throws IOException
 206 | * thrown if an IO error occurs.
207 | */
208 | private HttpURLConnection getConnection(String method,
 209 | Map<String, String> params, Path path, boolean makeQualified)
 210 | throws IOException {
 ... | (method body not preserved in this excerpt; per the Javadoc above, it builds
 ... | the HttpFSServer request URL from the path and query string parameters and
 ... | delegates to getConnection(URL, String))
 233 | * Convenience method that creates a HttpURLConnection for the
234 | * specified URL.
235 | *
 236 | * This method performs authentication and injects any needed credentials.
237 | *
238 | * @param url
239 | * url to connect to.
240 | * @param method
241 | * the HTTP method.
242 | *
 243 | * @return a HttpURLConnection for the HttpFSServer server,
244 | * authenticated and ready to use for the specified path and file
245 | * system operation.
246 | *
247 | * @throws IOException
 248 | * thrown if an IO error occurs.
249 | */
250 | private HttpURLConnection getConnection(URL url, String method)
251 | throws IOException {
 252 | Class<? extends Authenticator> klass = getConf().getClass(
253 | "httpfs.authenticator.class", HttpKerberosAuthenticator.class,
254 | Authenticator.class);
255 | Authenticator authenticator = ReflectionUtils.newInstance(klass,
256 | getConf());
257 | try {
258 | HttpURLConnection conn = new AuthenticatedURL(authenticator)
259 | .openConnection(url, authToken);
260 | conn.setRequestMethod(method);
261 | if (method.equals(HTTP_POST) || method.equals(HTTP_PUT)) {
262 | conn.setDoOutput(true);
263 | }
264 | return conn;
265 | } catch (Exception ex) {
266 | throw new IOException(ex);
267 | }
268 | }
269 |
270 | /**
 271 | * Convenience method that JSON parses the InputStream of a
 272 | * HttpURLConnection.
273 | *
274 | * @param conn
 275 | * the HttpURLConnection.
276 | *
277 | * @return the parsed JSON object.
278 | *
279 | * @throws IOException
 280 | * thrown if the InputStream could not be JSON
281 | * parsed.
282 | */
283 | private static Object jsonParse(HttpURLConnection conn) throws IOException {
284 | try {
285 | JSONParser parser = new JSONParser();
286 | return parser.parse(new InputStreamReader(conn.getInputStream()));
287 | } catch (ParseException ex) {
288 | throw new IOException("JSON parser error, " + ex.getMessage(), ex);
289 | }
290 | }
291 |
292 | /**
 293 | * Validates the status of an HttpURLConnection against an
294 | * expected HTTP status code. If the current status code is not the expected
 295 | * one, it throws an exception with a detail message using server-side error
296 | * messages if available.
297 | *
298 | * @param conn
 299 | * the HttpURLConnection.
300 | * @param expected
301 | * the expected HTTP status code.
302 | *
303 | * @throws IOException
304 | * thrown if the current status code does not match the expected
305 | * one.
306 | */
307 | private static void validateResponse(HttpURLConnection conn, int expected)
308 | throws IOException {
309 | int status = conn.getResponseCode();
310 | if (status != expected) {
311 | try {
312 | JSONObject json = (JSONObject) jsonParse(conn);
313 | json = (JSONObject) json.get(ERROR_JSON);
314 | String message = (String) json.get(ERROR_MESSAGE_JSON);
315 | String exception = (String) json.get(ERROR_EXCEPTION_JSON);
316 | String className = (String) json.get(ERROR_CLASSNAME_JSON);
317 |
318 | try {
319 | ClassLoader cl = HttpFSFileSystem.class.getClassLoader();
320 | Class klass = cl.loadClass(className);
321 | Constructor constr = klass.getConstructor(String.class);
322 | throw (IOException) constr.newInstance(message);
323 | } catch (IOException ex) {
324 | throw ex;
325 | } catch (Exception ex) {
326 | throw new IOException(MessageFormat.format("{0} - {1}",
327 | exception, message));
328 | }
329 | } catch (IOException ex) {
330 | if (ex.getCause() instanceof IOException) {
331 | throw (IOException) ex.getCause();
332 | }
333 | throw new IOException(MessageFormat.format(
334 | "HTTP status [{0}], {1}", status,
335 | conn.getResponseMessage()));
336 | }
337 | }
338 | }
339 |
340 | /**
341 | * Called after a new FileSystem instance is constructed.
342 | *
343 | * @param name
344 | * a uri whose authority section names the host, port, etc. for
345 | * this FileSystem
346 | * @param conf
347 | * the configuration
348 | */
349 | @Override
350 | public void initialize(URI name, Configuration conf) throws IOException {
351 | UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
352 | doAs = ugi.getUserName();
 353 | // TODO: needs to be changed (doAs is hard-coded to "hdfs")
 354 | doAs = "hdfs";
355 | super.initialize(name, conf);
356 | try {
357 | uri = new URI(name.getScheme() + "://" + name.getHost() + ":"
358 | + name.getPort());
359 | } catch (URISyntaxException ex) {
360 | throw new IOException(ex);
361 | }
362 | }
363 |
364 | /**
365 | * Returns a URI whose scheme and authority identify this FileSystem.
366 | *
367 | * @return the URI whose scheme and authority identify this FileSystem.
368 | */
369 | @Override
370 | public URI getUri() {
371 | return uri;
372 | }
373 |
374 | /**
 375 | * HttpFSServer subclass of the FSDataInputStream.
376 | *
377 | * This implementation does not support the PositionReadable
 378 | * and Seekable methods.
379 | */
380 | private static class HttpFSDataInputStream extends FilterInputStream
381 | implements Seekable, PositionedReadable {
382 |
383 | protected HttpFSDataInputStream(InputStream in, int bufferSize) {
384 | super(new BufferedInputStream(in, bufferSize));
385 | }
386 |
387 | @Override
388 | public int read(long position, byte[] buffer, int offset, int length)
389 | throws IOException {
390 | throw new UnsupportedOperationException();
391 | }
392 |
393 | @Override
394 | public void readFully(long position, byte[] buffer, int offset,
395 | int length) throws IOException {
396 | throw new UnsupportedOperationException();
397 | }
398 |
399 | @Override
400 | public void readFully(long position, byte[] buffer) throws IOException {
401 | throw new UnsupportedOperationException();
402 | }
403 |
404 | @Override
405 | public void seek(long pos) throws IOException {
406 | throw new UnsupportedOperationException();
407 | }
408 |
409 | @Override
410 | public long getPos() throws IOException {
411 | throw new UnsupportedOperationException();
412 | }
413 |
414 | @Override
415 | public boolean seekToNewSource(long targetPos) throws IOException {
416 | throw new UnsupportedOperationException();
417 | }
418 | }
419 |
420 | /**
421 | * Opens an FSDataInputStream at the indicated Path.
 422 | * IMPORTANT: the returned FSDataInputStream does not support the
 423 | * PositionReadable and Seekable methods.
424 | *
425 | * @param f
426 | * the file name to open
427 | * @param bufferSize
428 | * the size of the buffer to be used.
429 | */
430 | @Override
431 | public FSDataInputStream open(Path f, int bufferSize) throws IOException {
 432 | Map<String, String> params = new HashMap<String, String>();
433 | params.put(OP_PARAM, Operation.OPEN.toString());
434 | HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(),
435 | params, f, true);
436 | validateResponse(conn, HttpURLConnection.HTTP_OK);
437 | return new FSDataInputStream(new HttpFSDataInputStream(
438 | conn.getInputStream(), bufferSize));
439 | }
440 |
441 | /**
 442 | * HttpFSServer subclass of the FSDataOutputStream.
443 | *
444 | * This implementation closes the underlying HTTP connection validating the
445 | * Http connection status at closing time.
446 | */
447 | private static class HttpFSDataOutputStream extends FSDataOutputStream {
448 | private HttpURLConnection conn;
449 | private int closeStatus;
450 |
451 | public HttpFSDataOutputStream(HttpURLConnection conn, OutputStream out,
452 | int closeStatus, Statistics stats) throws IOException {
453 | super(out, stats);
454 | this.conn = conn;
455 | this.closeStatus = closeStatus;
456 | }
457 |
458 | @Override
459 | public void close() throws IOException {
460 | try {
461 | super.close();
462 | } finally {
463 | validateResponse(conn, closeStatus);
464 | }
465 | }
466 |
467 | }
468 |
469 | /**
 470 | * Converts a FsPermission to a Unix octal representation.
471 | *
472 | * @param p
473 | * the permission.
474 | *
 475 | * @return the Unix octal string representation.
476 | */
477 | public static String permissionToString(FsPermission p) {
478 | return Integer.toString((p == null) ? DEFAULT_PERMISSION : p.toShort(),
479 | 8);
480 | }
481 |
482 | /*
483 | * Common handling for uploading data for create and append operations.
484 | */
485 | private FSDataOutputStream uploadData(String method, Path f,
 486 | Map<String, String> params, int bufferSize, int expectedStatus)
487 | throws IOException {
488 | HttpURLConnection conn = getConnection(method, params, f, true);
489 | conn.setInstanceFollowRedirects(false);
490 | boolean exceptionAlreadyHandled = false;
491 | try {
492 | if (conn.getResponseCode() == HTTP_TEMPORARY_REDIRECT) {
493 | exceptionAlreadyHandled = true;
494 | String location = conn.getHeaderField("Location");
495 | if (location != null) {
496 | conn = getConnection(new URL(location), method);
497 | conn.setRequestProperty("Content-Type", UPLOAD_CONTENT_TYPE);
498 | try {
499 | OutputStream os = new BufferedOutputStream(
500 | conn.getOutputStream(), bufferSize);
501 | return new HttpFSDataOutputStream(conn, os,
502 | expectedStatus, statistics);
503 | } catch (IOException ex) {
504 | validateResponse(conn, expectedStatus);
505 | throw ex;
506 | }
507 | } else {
508 | validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
509 | throw new IOException(
510 | "Missing HTTP 'Location' header for ["
511 | + conn.getURL() + "]");
512 | }
513 | } else {
514 | throw new IOException(MessageFormat.format(
515 | "Expected HTTP status was [307], received [{0}]",
516 | conn.getResponseCode()));
517 | }
518 | } catch (IOException ex) {
519 | if (exceptionAlreadyHandled) {
520 | throw ex;
521 | } else {
522 | validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
523 | throw ex;
524 | }
525 | }
526 | }
527 |
528 | /**
529 | * Opens an FSDataOutputStream at the indicated Path with write-progress
530 | * reporting.
531 | *
 532 | * IMPORTANT: The Progressable parameter is not used.
533 | *
534 | * @param f
535 | * the file name to open.
536 | * @param permission
537 | * file permission.
538 | * @param overwrite
539 | * if a file with this name already exists, then if true, the
540 | * file will be overwritten, and if false an error will be
541 | * thrown.
542 | * @param bufferSize
543 | * the size of the buffer to be used.
544 | * @param replication
545 | * required block replication for the file.
546 | * @param blockSize
547 | * block size.
548 | * @param progress
549 | * progressable.
550 | *
551 | * @throws IOException
552 | * @see #setPermission(Path, FsPermission)
553 | */
554 | @Override
555 | public FSDataOutputStream create(Path f, FsPermission permission,
556 | boolean overwrite, int bufferSize, short replication,
557 | long blockSize, Progressable progress) throws IOException {
 558 | Map<String, String> params = new HashMap<String, String>();
559 | params.put(OP_PARAM, Operation.CREATE.toString());
560 | params.put(OVERWRITE_PARAM, Boolean.toString(overwrite));
561 | params.put(REPLICATION_PARAM, Short.toString(replication));
562 | params.put(BLOCKSIZE_PARAM, Long.toString(blockSize));
563 | params.put(PERMISSION_PARAM, permissionToString(permission));
564 | return uploadData(Operation.CREATE.getMethod(), f, params, bufferSize,
565 | HttpURLConnection.HTTP_CREATED);
566 | }
567 |
568 | /**
569 | * Append to an existing file (optional operation).
570 | *
 571 | * IMPORTANT: The Progressable parameter is not used.
572 | *
573 | * @param f
574 | * the existing file to be appended.
575 | * @param bufferSize
576 | * the size of the buffer to be used.
577 | * @param progress
578 | * for reporting progress if it is not null.
579 | *
580 | * @throws IOException
581 | */
582 | @Override
583 | public FSDataOutputStream append(Path f, int bufferSize,
584 | Progressable progress) throws IOException {
 585 | Map<String, String> params = new HashMap<String, String>();
586 | params.put(OP_PARAM, Operation.APPEND.toString());
587 | return uploadData(Operation.APPEND.getMethod(), f, params, bufferSize,
588 | HttpURLConnection.HTTP_OK);
589 | }
590 |
591 | /**
592 | * Renames Path src to Path dst. Can take place on local fs or remote DFS.
593 | */
594 | @Override
595 | public boolean rename(Path src, Path dst) throws IOException {
 596 | Map<String, String> params = new HashMap<String, String>();
597 | params.put(OP_PARAM, Operation.RENAME.toString());
598 | params.put(DESTINATION_PARAM, dst.toString());
599 | HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(),
600 | params, src, true);
601 | validateResponse(conn, HttpURLConnection.HTTP_OK);
602 | JSONObject json = (JSONObject) jsonParse(conn);
603 | return (Boolean) json.get(RENAME_JSON);
604 | }
605 |
606 | /**
607 | * Delete a file.
608 | *
609 | * @deprecated Use delete(Path, boolean) instead
610 | */
611 | @SuppressWarnings({ "deprecation" })
612 | @Deprecated
613 | @Override
614 | public boolean delete(Path f) throws IOException {
615 | return delete(f, false);
616 | }
617 |
618 | /**
619 | * Delete a file.
620 | *
621 | * @param f
622 | * the path to delete.
623 | * @param recursive
624 | * if path is a directory and set to true, the directory is
 625 | * deleted, otherwise an exception is thrown. For a file, the
 626 | * recursive flag can be set to either true or false.
627 | *
628 | * @return true if delete is successful else false.
629 | *
630 | * @throws IOException
631 | */
632 | @Override
633 | public boolean delete(Path f, boolean recursive) throws IOException {
 634 | Map<String, String> params = new HashMap<String, String>();
635 | params.put(OP_PARAM, Operation.DELETE.toString());
636 | params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
637 | HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(),
638 | params, f, true);
639 | validateResponse(conn, HttpURLConnection.HTTP_OK);
640 | JSONObject json = (JSONObject) jsonParse(conn);
641 | return (Boolean) json.get(DELETE_JSON);
642 | }
643 |
644 | /**
645 | * List the statuses of the files/directories in the given path if the path
646 | * is a directory.
647 | *
648 | * @param f
649 | * given path
650 | *
 651 | * @return the statuses of the files/directories in the given path
652 | *
653 | * @throws IOException
654 | */
655 | @Override
656 | public FileStatus[] listStatus(Path f) throws IOException {
 657 | Map<String, String> params = new HashMap<String, String>();
658 | params.put(OP_PARAM, Operation.LISTSTATUS.toString());
659 | HttpURLConnection conn = getConnection(
660 | Operation.LISTSTATUS.getMethod(), params, f, true);
661 | validateResponse(conn, HttpURLConnection.HTTP_OK);
662 | JSONObject json = (JSONObject) jsonParse(conn);
663 | json = (JSONObject) json.get(FILE_STATUSES_JSON);
664 | JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON);
665 | FileStatus[] array = new FileStatus[jsonArray.size()];
666 | f = makeQualified(f);
667 | for (int i = 0; i < jsonArray.size(); i++) {
668 | array[i] = createFileStatus(f, (JSONObject) jsonArray.get(i));
669 | }
670 | return array;
671 | }
672 |
673 | /**
674 | * Set the current working directory for the given file system. All relative
675 | * paths will be resolved relative to it.
676 | *
677 | * @param newDir
678 | * new directory.
679 | */
680 | @Override
681 | public void setWorkingDirectory(Path newDir) {
682 | workingDir = newDir;
683 | }
684 |
685 | /**
686 | * Get the current working directory for the given file system
687 | *
688 | * @return the directory pathname
689 | */
690 | @Override
691 | public Path getWorkingDirectory() {
692 | if (workingDir == null) {
693 | workingDir = getHomeDirectory();
694 | }
695 | return workingDir;
696 | }
697 |
698 | /**
699 | * Make the given file and all non-existent parents into directories. Has
700 | * the semantics of Unix 'mkdir -p'. Existence of the directory hierarchy is
701 | * not an error.
702 | */
703 | @Override
704 | public boolean mkdirs(Path f, FsPermission permission) throws IOException {
 705 | Map<String, String> params = new HashMap<String, String>();
706 | params.put(OP_PARAM, Operation.MKDIRS.toString());
707 | params.put(PERMISSION_PARAM, permissionToString(permission));
708 | HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(),
709 | params, f, true);
710 | validateResponse(conn, HttpURLConnection.HTTP_OK);
711 | JSONObject json = (JSONObject) jsonParse(conn);
712 | return (Boolean) json.get(MKDIRS_JSON);
713 | }
714 |
715 | /**
716 | * Return a file status object that represents the path.
717 | *
718 | * @param f
719 | * The path we want information from
720 | *
721 | * @return a FileStatus object
722 | *
723 | * @throws FileNotFoundException
724 | * when the path does not exist; IOException see specific
725 | * implementation
726 | */
727 | @Override
728 | public FileStatus getFileStatus(Path f) throws IOException {
 729 | Map<String, String> params = new HashMap<String, String>();
730 | params.put(OP_PARAM, Operation.GETFILESTATUS.toString());
731 | HttpURLConnection conn = getConnection(
732 | Operation.GETFILESTATUS.getMethod(), params, f, true);
733 | validateResponse(conn, HttpURLConnection.HTTP_OK);
734 | JSONObject json = (JSONObject) jsonParse(conn);
735 | json = (JSONObject) json.get(FILE_STATUS_JSON);
736 | f = makeQualified(f);
737 | return createFileStatus(f, json);
738 | }
739 |
740 | /**
741 | * Return the current user's home directory in this filesystem. The default
742 | * implementation returns "/user/$USER/".
743 | */
744 | @Override
745 | public Path getHomeDirectory() {
 746 | Map<String, String> params = new HashMap<String, String>();
747 | params.put(OP_PARAM, Operation.GETHOMEDIRECTORY.toString());
748 | try {
749 | HttpURLConnection conn = getConnection(
750 | Operation.GETHOMEDIRECTORY.getMethod(), params, new Path(
751 | getUri().toString(), "/"), false);
752 | validateResponse(conn, HttpURLConnection.HTTP_OK);
753 | JSONObject json = (JSONObject) jsonParse(conn);
754 | return new Path((String) json.get(HOME_DIR_JSON));
755 | } catch (IOException ex) {
756 | throw new RuntimeException(ex);
757 | }
758 | }
759 |
760 | /**
761 | * Set owner of a path (i.e. a file or a directory). The parameters username
762 | * and groupname cannot both be null.
763 | *
764 | * @param p
765 | * The path
766 | * @param username
767 | * If it is null, the original username remains unchanged.
768 | * @param groupname
769 | * If it is null, the original groupname remains unchanged.
770 | */
771 | @Override
772 | public void setOwner(Path p, String username, String groupname)
773 | throws IOException {
 774 | Map<String, String> params = new HashMap<String, String>();
775 | params.put(OP_PARAM, Operation.SETOWNER.toString());
776 | params.put(OWNER_PARAM, username);
777 | params.put(GROUP_PARAM, groupname);
778 | HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(),
779 | params, p, true);
780 | validateResponse(conn, HttpURLConnection.HTTP_OK);
781 | }
782 |
783 | /**
784 | * Set permission of a path.
785 | *
786 | * @param p
787 | * path.
788 | * @param permission
789 | * permission.
790 | */
791 | @Override
792 | public void setPermission(Path p, FsPermission permission)
793 | throws IOException {
 794 | Map<String, String> params = new HashMap<String, String>();
795 | params.put(OP_PARAM, Operation.SETPERMISSION.toString());
796 | params.put(PERMISSION_PARAM, permissionToString(permission));
797 | HttpURLConnection conn = getConnection(
798 | Operation.SETPERMISSION.getMethod(), params, p, true);
799 | validateResponse(conn, HttpURLConnection.HTTP_OK);
800 | }
801 |
802 | /**
 803 | * Set the access and modification times of a file.
804 | *
805 | * @param p
806 | * The path
807 | * @param mtime
808 | * Set the modification time of this file. The number of
809 | * milliseconds since Jan 1, 1970. A value of -1 means that this
810 | * call should not set modification time.
811 | * @param atime
812 | * Set the access time of this file. The number of milliseconds
813 | * since Jan 1, 1970. A value of -1 means that this call should
814 | * not set access time.
815 | */
816 | @Override
817 | public void setTimes(Path p, long mtime, long atime) throws IOException {
 818 | Map<String, String> params = new HashMap<String, String>();
819 | params.put(OP_PARAM, Operation.SETTIMES.toString());
820 | params.put(MODIFICATION_TIME_PARAM, Long.toString(mtime));
821 | params.put(ACCESS_TIME_PARAM, Long.toString(atime));
822 | HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(),
823 | params, p, true);
824 | validateResponse(conn, HttpURLConnection.HTTP_OK);
825 | }
826 |
827 | /**
828 | * Set replication for an existing file.
829 | *
830 | * @param src
831 | * file name
832 | * @param replication
833 | * new replication
834 | *
835 | * @return true if successful; false if file does not exist or is a
836 | * directory
837 | *
838 | * @throws IOException
839 | */
840 | @Override
841 | public boolean setReplication(Path src, short replication)
842 | throws IOException {
 843 | Map<String, String> params = new HashMap<String, String>();
844 | params.put(OP_PARAM, Operation.SETREPLICATION.toString());
845 | params.put(REPLICATION_PARAM, Short.toString(replication));
846 | HttpURLConnection conn = getConnection(
847 | Operation.SETREPLICATION.getMethod(), params, src, true);
848 | validateResponse(conn, HttpURLConnection.HTTP_OK);
849 | JSONObject json = (JSONObject) jsonParse(conn);
850 | return (Boolean) json.get(SET_REPLICATION_JSON);
851 | }
852 |
853 | /**
 854 | * Creates a FileStatus object using a JSON file-status payload
855 | * received from a HttpFSServer server.
856 | *
857 | * @param json
858 | * a JSON file-status payload received from a HttpFSServer server
859 | *
860 | * @return the corresponding FileStatus
861 | */
862 | private FileStatus createFileStatus(Path parent, JSONObject json) {
863 | String pathSuffix = (String) json.get(PATH_SUFFIX_JSON);
864 | Path path = (pathSuffix.equals("")) ? parent : new Path(parent,
865 | pathSuffix);
866 | FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON));
867 | long len = (Long) json.get(LENGTH_JSON);
868 | String owner = (String) json.get(OWNER_JSON);
869 | String group = (String) json.get(GROUP_JSON);
870 | FsPermission permission = new FsPermission(Short.parseShort(
871 | (String) json.get(PERMISSION_JSON), 8));
872 | long aTime = (Long) json.get(ACCESS_TIME_JSON);
873 | long mTime = (Long) json.get(MODIFICATION_TIME_JSON);
874 | long blockSize = (Long) json.get(BLOCK_SIZE_JSON);
875 | short replication = ((Long) json.get(REPLICATION_JSON)).shortValue();
876 | FileStatus fileStatus = null;
877 |
878 | switch (type) {
879 | case FILE:
880 | case DIRECTORY:
881 | fileStatus = new FileStatus(len, (type == FILE_TYPE.DIRECTORY),
882 | replication, blockSize, mTime, aTime, permission, owner,
883 | group, path);
884 | break;
885 | case SYMLINK:
886 | throw new IllegalArgumentException(
887 | "SYMLINKs are not supported in cdh3 : "
 888 | + path); // 'fileStatus' is still null in this branch; report the parsed path
889 | }
890 | return fileStatus;
891 | }
892 |
893 | @Override
894 | public ContentSummary getContentSummary(Path f) throws IOException {
 895 | Map<String, String> params = new HashMap<String, String>();
896 | params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
897 | HttpURLConnection conn = getConnection(
898 | Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
899 | validateResponse(conn, HttpURLConnection.HTTP_OK);
900 | JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn))
901 | .get(CONTENT_SUMMARY_JSON);
902 | return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
903 | (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
904 | (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
905 | (Long) json.get(CONTENT_SUMMARY_QUOTA_JSON),
906 | (Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON),
907 | (Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON));
908 | }
909 |
910 | @Override
911 | public FileChecksum getFileChecksum(Path f) throws IOException {
 912 | Map<String, String> params = new HashMap<String, String>();
913 | params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
914 | HttpURLConnection conn = getConnection(
915 | Operation.GETFILECHECKSUM.getMethod(), params, f, true);
916 | validateResponse(conn, HttpURLConnection.HTTP_OK);
917 | final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn))
918 | .get(FILE_CHECKSUM_JSON);
919 | return new FileChecksum() {
920 | @Override
921 | public String getAlgorithmName() {
922 | return (String) json.get(CHECKSUM_ALGORITHM_JSON);
923 | }
924 |
925 | @Override
926 | public int getLength() {
927 | return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
928 | }
929 |
930 | @Override
931 | public byte[] getBytes() {
932 | return StringUtils.hexStringToByte((String) json
933 | .get(CHECKSUM_BYTES_JSON));
934 | }
935 |
936 | @Override
937 | public void write(DataOutput out) throws IOException {
938 | throw new UnsupportedOperationException();
939 | }
940 |
941 | @Override
942 | public void readFields(DataInput in) throws IOException {
943 | throw new UnsupportedOperationException();
944 | }
945 | };
946 | }
947 |
948 | }
--------------------------------------------------------------------------------
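For reference, a minimal standalone sketch (not part of this repository) of the two-step redirect protocol that uploadData() above implements for CREATE: a first PUT without a body is answered with HTTP 307 and a Location header, and the data is then PUT to that location. It assumes an HttpFS gateway at the host/port from httpfs.properties and pseudo authentication via the user.name query parameter; the target path, user name and payload are illustrative.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class CreateViaRedirectSketch {
    public static void main(String[] args) throws Exception {
        // Step 1: PUT without a body; HttpFS answers 307 with a Location header.
        URL url = new URL("http://192.168.230.129:14000/webhdfs/v1/test/demo.txt"
                + "?op=CREATE&user.name=hdfs");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setInstanceFollowRedirects(false);
        if (conn.getResponseCode() != 307) {
            throw new IllegalStateException("Expected 307, got " + conn.getResponseCode());
        }
        String location = conn.getHeaderField("Location");

        // Step 2: PUT the bytes to the redirect location; expect 201 Created.
        HttpURLConnection dataConn = (HttpURLConnection) new URL(location).openConnection();
        dataConn.setRequestMethod("PUT");
        dataConn.setDoOutput(true);
        dataConn.setRequestProperty("Content-Type", "application/octet-stream");
        try (OutputStream os = dataConn.getOutputStream()) {
            os.write("hello httpfs".getBytes("UTF-8"));
        }
        System.out.println("CREATE status: " + dataConn.getResponseCode());
    }
}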
/src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 | package org.apache.hadoop.fs.http.client;
19 |
20 | import org.apache.hadoop.security.authentication.client.Authenticator;
21 | import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
22 |
23 | /**
 24 | * A KerberosAuthenticator subclass that falls back to
25 | * {@link HttpPseudoAuthenticator}.
26 | */
27 | public class HttpKerberosAuthenticator extends KerberosAuthenticator {
28 |
29 | /**
30 | * Returns the fallback authenticator if the server does not use
31 | * Kerberos SPNEGO HTTP authentication.
32 | *
33 | * @return a {@link HttpPseudoAuthenticator} instance.
34 | */
35 | @Override
36 | protected Authenticator getFallBackAuthenticator() {
37 | return new HttpPseudoAuthenticator();
38 | }
39 | }
--------------------------------------------------------------------------------
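A small configuration sketch for context: getConnection(URL, String) in HttpFSFileSystem reads the "httpfs.authenticator.class" key and defaults to HttpKerberosAuthenticator, so a cluster without Kerberos could pin the pseudo authenticator explicitly. The class and method names below are illustrative, not part of the project.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.http.client.HttpPseudoAuthenticator;

public class AuthenticatorConfigSketch {
    // Returns a Configuration that forces pseudo (user.name) authentication
    // instead of the Kerberos SPNEGO default used by HttpFSFileSystem.
    public static Configuration pseudoAuthConf() {
        Configuration conf = new Configuration();
        conf.set("httpfs.authenticator.class",
                HttpPseudoAuthenticator.class.getName());
        return conf;
    }
}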
/src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Licensed to the Apache Software Foundation (ASF) under one
3 | * or more contributor license agreements. See the NOTICE file
4 | * distributed with this work for additional information
5 | * regarding copyright ownership. The ASF licenses this file
6 | * to you under the Apache License, Version 2.0 (the
7 | * "License"); you may not use this file except in compliance
8 | * with the License. You may obtain a copy of the License at
9 | *
10 | * http://www.apache.org/licenses/LICENSE-2.0
11 | *
12 | * Unless required by applicable law or agreed to in writing, software
13 | * distributed under the License is distributed on an "AS IS" BASIS,
14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | * See the License for the specific language governing permissions and
16 | * limitations under the License.
17 | */
18 | package org.apache.hadoop.fs.http.client;
19 |
20 | import java.io.IOException;
21 |
22 | import org.apache.hadoop.security.UserGroupInformation;
23 | import org.apache.hadoop.security.authentication.client.PseudoAuthenticator;
24 |
25 | /**
 26 | * A PseudoAuthenticator subclass that uses FileSystemAccess's
 27 | * UserGroupInformation to obtain the client user name (the UGI's login user).
28 | */
29 | public class HttpPseudoAuthenticator extends PseudoAuthenticator {
30 |
31 | /**
32 | * Return the client user name.
33 | *
34 | * @return the client user name.
35 | */
36 | @Override
37 | protected String getUserName() {
38 | try {
39 | return UserGroupInformation.getLoginUser().getUserName();
40 | } catch (IOException ex) {
41 | throw new SecurityException("Could not obtain current user, " + ex.getMessage(), ex);
42 | }
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/main/resources/httpfs.properties:
--------------------------------------------------------------------------------
 1 | # HttpFS server host IP
2 | host=192.168.230.129
 3 | # HttpFS server port
4 | port=14000
--------------------------------------------------------------------------------
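The com.catt.httpfs.client.utils.HttpFSConf class that reads this file is not shown in this excerpt; the test Demo further below calls HttpFSConf.getHOST() and HttpFSConf.getPORT(), so a classpath-based loader along the following lines is assumed. This is a hypothetical sketch, not the project's actual implementation.

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public final class HttpFSConfSketch {
    private static final Properties PROPS = new Properties();

    static {
        // Loads src/main/resources/httpfs.properties from the classpath.
        try (InputStream in = HttpFSConfSketch.class.getClassLoader()
                .getResourceAsStream("httpfs.properties")) {
            PROPS.load(in);
        } catch (IOException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    public static String getHOST() { return PROPS.getProperty("host", "192.168.230.129"); }
    public static String getPORT() { return PROPS.getProperty("port", "14000"); }
}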
/src/test/java/com/catt/httpfs/client/httpclient/Demo.java:
--------------------------------------------------------------------------------
1 | package com.catt.httpfs.client.httpclient;
2 |
3 | public class Demo {
4 | public static void main(String[] args) {
5 | HttpFSClient client = new HttpFSClient();
6 | client.initCookie();
7 |
 8 | // Get the home directory of the current user
9 | client.get("", "op=gethomedirectory");
10 |
 11 | // // Upload a file
12 | // client.put("/test", "op=CREATE&buffersize=1000");
13 | // client.upload("/test/pom.xml", "op=CREATE&buffersize=1000&data=true",
14 | // "pom.xml");
15 | //
 16 | // // Delete a file
17 | // client.delete("/test2/demo.xml", "op=DELETE");
18 | //
 19 | // // Create a directory
20 | // client.put("/test2/test9", "op=MKDIRS");
21 | //
 22 | // // Read a file
23 | // client.get("/test/data.txt",
24 | // "op=OPEN&buffersize=10000&data=true",true);
25 |
 26 | // Get the file listing information
27 | String result = client.get("/test", "op=LISTSTATUS");
28 | System.out.println(result);
 29 | // Process the returned result
30 | HttpFSUtils.parseResult(result);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
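For comparison, a standalone sketch of the raw HttpFS REST request corresponding to client.get("/test", "op=LISTSTATUS") above, assuming the host/port from httpfs.properties and pseudo authentication via user.name; the user name is illustrative. The response body is the same JSON that HttpFSUtils.parseResult() processes.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class ListStatusRestSketch {
    public static void main(String[] args) throws Exception {
        // GET /webhdfs/v1/<path>?op=LISTSTATUS with pseudo authentication.
        URL url = new URL("http://192.168.230.129:14000/webhdfs/v1/test"
                + "?op=LISTSTATUS&user.name=hdfs");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                // JSON of the form {"FileStatuses":{"FileStatus":[ ... ]}}
                System.out.println(line);
            }
        }
    }
}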
/src/test/java/org/apache/hadoop/fs/http/client/Demo.java:
--------------------------------------------------------------------------------
1 | package org.apache.hadoop.fs.http.client;
2 |
3 | import java.net.URL;
4 |
5 | import org.apache.hadoop.conf.Configuration;
6 | import org.apache.hadoop.fs.FileSystem;
7 | import org.apache.hadoop.fs.Path;
8 |
9 | import com.catt.httpfs.client.utils.HttpFSConf;
10 |
11 | public class Demo {
12 | public static void main(String[] args) throws Exception {
13 | Configuration conf = new Configuration();
14 | conf.set("fs.defaultFS", "hdfs://localhost.localdomain:8020/");
15 |
16 | Path path = new Path("/test2/test3");
17 | FileSystem fs = getHttpFileSystem();
18 | fs.mkdirs(path);
19 | fs.close();
20 | fs = FileSystem.get(conf);
21 | System.out.println(fs.exists(path));
22 | fs.close();
23 | }
24 |
25 | protected static FileSystem getHttpFileSystem() throws Exception {
26 | Configuration conf = new Configuration();
27 | conf.set("fs.http.impl", HttpFSFileSystem.class.getName());
28 | String url = "http://" + HttpFSConf.getHOST() + ":"
29 | + HttpFSConf.getPORT();
30 | return FileSystem.get(new URL(url).toURI(), conf);
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
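To round out the demo above (which only creates a directory), a further usage sketch, not part of the repository, that writes and reads back a file through HttpFSFileSystem; the path, payload and gateway address (taken from httpfs.properties) are illustrative.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
import org.apache.hadoop.io.IOUtils;

public class HttpFSReadWriteSketch {
    public static void main(String[] args) throws Exception {
        // Same wiring as getHttpFileSystem() in the Demo above: bind the http://
        // scheme to HttpFSFileSystem and point it at the HttpFS gateway.
        Configuration conf = new Configuration();
        conf.set("fs.http.impl", HttpFSFileSystem.class.getName());
        FileSystem fs = FileSystem.get(new URI("http://192.168.230.129:14000"), conf);

        Path file = new Path("/test2/hello.txt");

        // create() runs through uploadData(): PUT, follow the 307 redirect, expect 201.
        try (FSDataOutputStream out = fs.create(file, true)) {
            out.write("hello httpfs".getBytes("UTF-8"));
        }

        // open() returns the HTTP-backed stream; seek and positioned reads are unsupported.
        try (FSDataInputStream in = fs.open(file)) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
        fs.close();
    }
}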