├── .classpath ├── .gitignore ├── .project ├── .settings ├── org.eclipse.core.resources.prefs ├── org.eclipse.jdt.core.prefs └── org.eclipse.m2e.core.prefs ├── README.txt ├── pom.xml └── src ├── main ├── java │ ├── com │ │ └── catt │ │ │ └── httpfs │ │ │ └── client │ │ │ ├── httpclient │ │ │ ├── HttpFSClient.java │ │ │ └── HttpFSUtils.java │ │ │ └── utils │ │ │ └── HttpFSConf.java │ └── org │ │ └── apache │ │ └── hadoop │ │ └── fs │ │ └── http │ │ └── client │ │ ├── HttpFSFileSystem.java │ │ ├── HttpKerberosAuthenticator.java │ │ └── HttpPseudoAuthenticator.java └── resources │ └── httpfs.properties └── test └── java ├── com └── catt │ └── httpfs │ └── client │ └── httpclient │ └── Demo.java └── org └── apache └── hadoop └── fs └── http └── client └── Demo.java /.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | httpfs-client 4 | 5 | 6 | 7 | 8 | 9 | org.eclipse.jdt.core.javabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.eclipse.jdt.core.javanature 21 | org.eclipse.m2e.core.maven2Nature 22 | 23 | 24 | -------------------------------------------------------------------------------- /.settings/org.eclipse.core.resources.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | encoding//src/main/java=utf-8 3 | encoding//src/main/resources=utf-8 4 | encoding//src/test/java=utf-8 5 | encoding/=utf-8 6 | 
-------------------------------------------------------------------------------- /.settings/org.eclipse.jdt.core.prefs: -------------------------------------------------------------------------------- 1 | eclipse.preferences.version=1 2 | org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled 3 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6 4 | org.eclipse.jdt.core.compiler.compliance=1.6 5 | org.eclipse.jdt.core.compiler.problem.assertIdentifier=error 6 | org.eclipse.jdt.core.compiler.problem.enumIdentifier=error 7 | org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning 8 | org.eclipse.jdt.core.compiler.source=1.6 9 | -------------------------------------------------------------------------------- /.settings/org.eclipse.m2e.core.prefs: -------------------------------------------------------------------------------- 1 | activeProfiles= 2 | eclipse.preferences.version=1 3 | resolveWorkspaceProjects=true 4 | version=1 5 | -------------------------------------------------------------------------------- /README.txt: -------------------------------------------------------------------------------- 1 | httpfs-client util 2 | httpfs-client is a client tool of HttpFS server which provides a REST HTTP gateway to HDFS with full 3 | filesystem read & write capabilities,httpfs-client read & write hdfs filesystem with the webhdfs REST HTTP API 4 | 5 | example 6 | please read com.catt.httpfs.client.httpclient.Demo and org.apache.hadoop.fs.http.client.Demo 7 | 8 | Requirements: 9 | * JDK 1.6.* 10 | * Maven 3.* 11 | 12 | How to build: 13 | Clone this Git repository. 14 | 15 | Run 'mvn package'. 16 | 17 | The resulting TARBALL will under the 'target/' directory. 
18 | 19 | 功能说明: 20 | 1 包com.catt.httpfs.client.httpclient是采用commons-httpclient.jar, 21 | 基于http请求实现的,没有使用到hadoop相关的jar 22 | 2 包org.apache.hadoop.fs.http.client根据httpfs项目的源代码, 23 | 根据需要修改了一下,使用了hadoop相关的jar 24 | 25 | Reference 26 | http://cloudera.github.com/httpfs/ 27 | 28 | Contact me 29 | http://my.oschina.net/cloudcoder 30 | 欢迎各位多提宝贵意见,并贡献你的力量 31 | 32 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | 5 | com.catt 6 | httpfs-client 7 | 1.0.0-SNAPSHOT 8 | jar 9 | 10 | httpfs-client 11 | http://maven.apache.org 12 | 13 | 14 | 15 | 2.0.0-cdh4.4.0 16 | 4.11 17 | utf-8 18 | utf-8 19 | 20 | 21 | 22 | 23 | 24 | org.apache.hadoop 25 | hadoop-client 26 | 2.0.0-mr1-cdh4.4.0 27 | provided 28 | 29 | 30 | org.apache.hadoop 31 | hadoop-auth 32 | ${hadoop.version} 33 | provided 34 | 35 | 36 | org.apache.hadoop 37 | hadoop-common 38 | ${hadoop.version} 39 | provided 40 | 41 | 42 | commons-httpclient 43 | commons-httpclient 44 | 3.1 45 | 46 | 47 | com.googlecode.json-simple 48 | json-simple 49 | 1.1 50 | 51 | 52 | 53 | 54 | commons-logging 55 | commons-logging 56 | 1.1.1 57 | 58 | 59 | log4j 60 | log4j 61 | 1.2.17 62 | provided 63 | 64 | 65 | org.slf4j 66 | slf4j-api 67 | 1.6.2 68 | provided 69 | 70 | 71 | org.slf4j 72 | slf4j-log4j12 73 | 1.6.2 74 | provided 75 | 76 | 77 | 78 | 79 | junit 80 | junit 81 | ${junit.version} 82 | test 83 | 84 | 85 | 86 | 87 | 88 | cloudera 89 | https://repository.cloudera.com/artifactory/cloudera-repos 90 | 91 | true 92 | 93 | 94 | false 95 | 96 | 97 | 98 | -------------------------------------------------------------------------------- /src/main/java/com/catt/httpfs/client/httpclient/HttpFSClient.java: -------------------------------------------------------------------------------- 1 | package com.catt.httpfs.client.httpclient; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.File; 5 | import 
java.io.IOException; 6 | import java.io.InputStream; 7 | import java.io.InputStreamReader; 8 | import java.io.PrintWriter; 9 | 10 | import org.apache.commons.httpclient.Cookie; 11 | import org.apache.commons.httpclient.HttpClient; 12 | import org.apache.commons.httpclient.HttpMethod; 13 | import org.apache.commons.httpclient.cookie.CookiePolicy; 14 | import org.apache.commons.httpclient.cookie.CookieSpec; 15 | import org.apache.commons.httpclient.methods.DeleteMethod; 16 | import org.apache.commons.httpclient.methods.GetMethod; 17 | import org.apache.commons.httpclient.methods.PostMethod; 18 | import org.apache.commons.httpclient.methods.PutMethod; 19 | import org.apache.commons.httpclient.methods.multipart.FilePart; 20 | import org.apache.commons.httpclient.methods.multipart.MultipartRequestEntity; 21 | import org.apache.commons.httpclient.methods.multipart.Part; 22 | import org.apache.commons.logging.Log; 23 | import org.apache.commons.logging.LogFactory; 24 | 25 | import com.catt.httpfs.client.utils.HttpFSConf; 26 | 27 | public class HttpFSClient { 28 | private final static Log log = LogFactory.getLog(HttpFSClient.class); 29 | private final static String CHARSET = "UTF8"; 30 | 31 | private Cookie[] cookies; 32 | private boolean isInitCookie = false; 33 | 34 | public void initCookie() { 35 | String url = HttpFSUtils.createURL("", 36 | "op=gethomedirectory&user.name=hdfs"); 37 | System.out.println(url); 38 | HttpClient client = new HttpClient(); 39 | HttpMethod method = new GetMethod(url); 40 | try { 41 | client.executeMethod(method); 42 | getCookie(client); 43 | } catch (Exception e) { 44 | log.error("init cookie value fail", e); 45 | } 46 | this.isInitCookie = true; 47 | method.releaseConnection(); 48 | } 49 | 50 | public String get(String path, String params) { 51 | return this.request(path, params, GetMethod.class); 52 | } 53 | 54 | public String get(String path, String params, boolean isGenFile) { 55 | return this.request(path, params, GetMethod.class, 
isGenFile, null); 56 | } 57 | 58 | public String delete(String path, String params) { 59 | return this.request(path, params, DeleteMethod.class); 60 | } 61 | 62 | public String put(String path, String params) { 63 | return this.request(path, params, PutMethod.class); 64 | } 65 | 66 | public String post(String path, String params) { 67 | return this.request(path, params, PostMethod.class); 68 | } 69 | 70 | public String upload(String path, String params, String fileName) { 71 | return this.request(path, params, fileName); 72 | } 73 | 74 | private String request(String path, String params, Class clz) { 75 | return this.request(path, params, clz, false, null); 76 | } 77 | 78 | private String request(String path, String params, String fileName) { 79 | return this.request(path, params, null, false, fileName); 80 | } 81 | 82 | private String request(String path, String params, Class clz, 83 | boolean isGenFile, String fileName) { 84 | if (this.isInitCookie == false) { 85 | return "please init cookie first"; 86 | } 87 | 88 | HttpClient client = new HttpClient(); 89 | // 由于要上传的文件可能比较大 , 因此在此设置最大的连接超时时间 90 | client.getHttpConnectionManager().getParams() 91 | .setConnectionTimeout(5000); 92 | client.getState().addCookies(cookies); 93 | StringBuffer sb = new StringBuffer(); 94 | int status = -1; 95 | 96 | HttpMethod method = getMethod(path, params, clz, fileName); 97 | try { 98 | status = client.executeMethod(method); 99 | if (isGenFile == false) { 100 | this.dealResponse(method.getResponseBodyAsStream(), sb); 101 | } else { 102 | this.dealResponse(method.getResponseBodyAsStream(), path); 103 | } 104 | } catch (Exception e) { 105 | log.error("getMethod fail", e); 106 | } 107 | method.releaseConnection(); 108 | log.info(method.getStatusLine() + "," + sb.toString()); 109 | return sb.toString(); 110 | } 111 | 112 | private HttpMethod getMethod(String path, String params, Class clz, 113 | String fileName) { 114 | HttpMethod method = null; 115 | String url = 
HttpFSUtils.createURL(path, params); 116 | try { 117 | if (null == clz) { 118 | method = getMethod(path, params, fileName); 119 | } else { 120 | method = (HttpMethod) clz.getConstructor(String.class) 121 | .newInstance(url); 122 | } 123 | method.setRequestHeader("content-type", "application/octet-stream"); 124 | } catch (Exception e) { 125 | log.error("getMethod fail", e); 126 | } 127 | return method; 128 | } 129 | 130 | private HttpMethod getMethod(String path, String params, String fileName) { 131 | String url = HttpFSUtils.createURL(path, params); 132 | PutMethod method = new PutMethod(url); 133 | method.setRequestHeader("content-type", "application/octet-stream"); 134 | try { 135 | // 设置上传文件 136 | File targetFile = new File(fileName); 137 | Part[] parts = { new FilePart(targetFile.getName(), targetFile) }; 138 | method.setRequestEntity(new MultipartRequestEntity(parts, method 139 | .getParams())); 140 | } catch (Exception e) { 141 | log.error("getMethod fail", e); 142 | } 143 | return method; 144 | } 145 | 146 | private void getCookie(HttpClient client) { 147 | CookieSpec cookiespec = CookiePolicy.getDefaultSpec(); 148 | this.cookies = cookiespec.match(HttpFSConf.getHOST(), HttpFSConf 149 | .getPORT(), "/", false, client.getState().getCookies()); 150 | } 151 | 152 | /** 153 | * deal HttpResponse content 154 | * 155 | * @param conn 156 | * @param sb 157 | * @return 158 | * @throws IOException 159 | */ 160 | private String dealResponse(InputStream in, StringBuffer sb) { 161 | BufferedReader reader = null; 162 | try { 163 | reader = new BufferedReader(new InputStreamReader(in, CHARSET)); 164 | String line = null; 165 | while ((line = reader.readLine()) != null) { 166 | sb.append(line); 167 | } 168 | } catch (IOException e) { 169 | log.error("deal response content fail", e); 170 | } finally { 171 | if (reader != null) { 172 | try { 173 | reader.close(); 174 | } catch (IOException e) { 175 | log.error("close BufferedReader error", e); 176 | } 177 | } 178 | } 179 | 
return sb.toString(); 180 | } 181 | 182 | private void dealResponse(InputStream in, String path) { 183 | int pos = path.lastIndexOf("/"); 184 | String fileName = path.substring(pos + 1); 185 | BufferedReader reader = null; 186 | PrintWriter out = null; 187 | try { 188 | out = new PrintWriter(fileName); 189 | reader = new BufferedReader(new InputStreamReader(in, CHARSET)); 190 | String line = null; 191 | while ((line = reader.readLine()) != null) { 192 | out.append(line + "\n"); 193 | } 194 | } catch (IOException e) { 195 | log.error("deal response content fail", e); 196 | } finally { 197 | if (reader != null) { 198 | try { 199 | reader.close(); 200 | } catch (IOException e) { 201 | log.error("close BufferedReader error", e); 202 | } 203 | } 204 | if (out != null) { 205 | try { 206 | out.close(); 207 | } catch (Exception e) { 208 | log.error("close PrintWriter error", e); 209 | } 210 | } 211 | } 212 | } 213 | } -------------------------------------------------------------------------------- /src/main/java/com/catt/httpfs/client/httpclient/HttpFSUtils.java: -------------------------------------------------------------------------------- 1 | package com.catt.httpfs.client.httpclient; 2 | 3 | import java.io.InputStream; 4 | import java.io.InputStreamReader; 5 | import java.util.Iterator; 6 | import java.util.Set; 7 | 8 | import org.apache.commons.logging.Log; 9 | import org.apache.commons.logging.LogFactory; 10 | import org.json.simple.JSONArray; 11 | import org.json.simple.JSONObject; 12 | import org.json.simple.parser.JSONParser; 13 | 14 | import com.catt.httpfs.client.utils.HttpFSConf; 15 | 16 | public class HttpFSUtils { 17 | private final static Log log = LogFactory.getLog(HttpFSUtils.class); 18 | private static final String SERVICE_PATH = "/webhdfs/v1"; 19 | private static final String SCHEME = "http"; 20 | 21 | public static String createURL(String path, String params) { 22 | StringBuilder sb = new StringBuilder(); 23 | 
sb.append(SCHEME).append("://").append(HttpFSConf.getHOST()) 24 | .append(":").append(HttpFSConf.getPORT()).append(SERVICE_PATH) 25 | .append(path).append("?").append(params); 26 | return sb.toString(); 27 | } 28 | 29 | public static void validateResponse(InputStream in, int respCode, 30 | int expectedCode) { 31 | if (respCode != expectedCode) { 32 | JSONObject json = (JSONObject) jsonParse(in); 33 | json = (JSONObject) json.get("RemoteException"); 34 | String message = (String) json.get("message"); 35 | String exception = (String) json.get("exception"); 36 | String className = (String) json.get("javaClassName"); 37 | } 38 | } 39 | 40 | public static void parseResult(String str) { 41 | try { 42 | JSONParser parser = new JSONParser(); 43 | JSONObject json = (JSONObject) parser.parse(str); 44 | json = (JSONObject) json.get("FileStatuses"); 45 | JSONArray jsonArray = (JSONArray) json.get("FileStatus"); 46 | for (int i = 0; i < jsonArray.size(); i++) { 47 | JSONObject obj = (JSONObject) jsonArray.get(i); 48 | Set set = obj.keySet(); 49 | for (Iterator it = set.iterator(); it.hasNext();) { 50 | String key = (String) it.next(); 51 | log.info(key + "==>" + obj.get(key)); 52 | } 53 | } 54 | } catch (Exception e) { 55 | log.error("jsonParse fail", e); 56 | } 57 | } 58 | 59 | public static Object jsonParse(InputStream in) { 60 | try { 61 | JSONParser parser = new JSONParser(); 62 | return parser.parse(new InputStreamReader(in)); 63 | } catch (Exception e) { 64 | log.error("jsonParse fail", e); 65 | } 66 | return null; 67 | } 68 | } -------------------------------------------------------------------------------- /src/main/java/com/catt/httpfs/client/utils/HttpFSConf.java: -------------------------------------------------------------------------------- 1 | package com.catt.httpfs.client.utils; 2 | 3 | import java.io.InputStream; 4 | import java.util.Iterator; 5 | import java.util.Properties; 6 | import java.util.Set; 7 | 8 | import org.apache.commons.logging.Log; 9 | import 
org.apache.commons.logging.LogFactory; 10 | 11 | public class HttpFSConf { 12 | private final static Log log = LogFactory.getLog(HttpFSConf.class); 13 | 14 | private static String HOST; 15 | private static int PORT = 14000; 16 | 17 | public static String getHOST() { 18 | return HOST; 19 | } 20 | 21 | public static void setHOST(String hOST) { 22 | HOST = hOST; 23 | } 24 | 25 | public static int getPORT() { 26 | return PORT; 27 | } 28 | 29 | public static void setPORT(int pORT) { 30 | PORT = pORT; 31 | } 32 | 33 | static { 34 | Properties prop = new Properties(); 35 | InputStream in; 36 | try { 37 | in = HttpFSConf.class.getResourceAsStream("/httpfs.properties"); 38 | prop.load(in); 39 | in.close(); 40 | } catch (Exception e) { 41 | log.error("load httpfs.properties fail", e); 42 | } 43 | 44 | Set keyValue = prop.keySet(); 45 | for (Iterator it = keyValue.iterator(); it.hasNext();) { 46 | String key = (String) it.next(); 47 | if ("host".equals(key)) { 48 | HttpFSConf.setHOST(prop.getProperty(key)); 49 | } else if ("port".equals(key)) { 50 | HttpFSConf.setPORT(Integer.parseInt(prop.getProperty(key, 51 | "14000"))); 52 | } 53 | } 54 | } 55 | 56 | public static void main(String[] args) { 57 | log.info(HttpFSConf.getHOST()); 58 | log.info(HttpFSConf.getPORT()); 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | package org.apache.hadoop.fs.http.client; 19 | 20 | import org.apache.hadoop.conf.Configuration; 21 | import org.apache.hadoop.fs.ContentSummary; 22 | import org.apache.hadoop.fs.FSDataInputStream; 23 | import org.apache.hadoop.fs.FSDataOutputStream; 24 | import org.apache.hadoop.fs.FileChecksum; 25 | import org.apache.hadoop.fs.FileStatus; 26 | import org.apache.hadoop.fs.FileSystem; 27 | import org.apache.hadoop.fs.Path; 28 | import org.apache.hadoop.fs.PositionedReadable; 29 | import org.apache.hadoop.fs.Seekable; 30 | import org.apache.hadoop.fs.permission.FsPermission; 31 | import org.apache.hadoop.security.UserGroupInformation; 32 | import org.apache.hadoop.security.authentication.client.AuthenticatedURL; 33 | import org.apache.hadoop.security.authentication.client.Authenticator; 34 | import org.apache.hadoop.util.Progressable; 35 | import org.apache.hadoop.util.ReflectionUtils; 36 | import org.apache.hadoop.util.StringUtils; 37 | import org.json.simple.JSONArray; 38 | import org.json.simple.JSONObject; 39 | import org.json.simple.parser.JSONParser; 40 | import org.json.simple.parser.ParseException; 41 | 42 | import java.io.BufferedInputStream; 43 | import java.io.BufferedOutputStream; 44 | import java.io.DataInput; 45 | import java.io.DataOutput; 46 | import java.io.FileNotFoundException; 47 | import java.io.FilterInputStream; 48 | import java.io.IOException; 49 | import java.io.InputStream; 50 | import java.io.InputStreamReader; 51 | import java.io.OutputStream; 52 | import java.lang.reflect.Constructor; 53 | 
import java.net.HttpURLConnection; 54 | import java.net.URI; 55 | import java.net.URISyntaxException; 56 | import java.net.URL; 57 | import java.net.URLEncoder; 58 | import java.text.MessageFormat; 59 | import java.util.HashMap; 60 | import java.util.Map; 61 | 62 | /** 63 | * HttpFSServer implementation of the FileSystemAccess FileSystem. 64 | *

65 | * This implementation allows a user to access HDFS over HTTP via a HttpFSServer 66 | * server. 67 | */ 68 | public class HttpFSFileSystem extends FileSystem { 69 | 70 | public static final String SERVICE_NAME = "/webhdfs"; 71 | 72 | public static final String SERVICE_VERSION = "/v1"; 73 | 74 | public static final String SERVICE_PREFIX = SERVICE_NAME + SERVICE_VERSION; 75 | 76 | public static final String OP_PARAM = "op"; 77 | public static final String DO_AS_PARAM = "doas"; 78 | public static final String OVERWRITE_PARAM = "overwrite"; 79 | public static final String REPLICATION_PARAM = "replication"; 80 | public static final String BLOCKSIZE_PARAM = "blocksize"; 81 | public static final String PERMISSION_PARAM = "permission"; 82 | public static final String DESTINATION_PARAM = "destination"; 83 | public static final String RECURSIVE_PARAM = "recursive"; 84 | public static final String OWNER_PARAM = "owner"; 85 | public static final String GROUP_PARAM = "group"; 86 | public static final String MODIFICATION_TIME_PARAM = "modificationtime"; 87 | public static final String ACCESS_TIME_PARAM = "accesstime"; 88 | public static final String RENEWER_PARAM = "renewer"; 89 | 90 | public static final Short DEFAULT_PERMISSION = 0755; 91 | 92 | public static final String RENAME_JSON = "boolean"; 93 | 94 | public static final String DELETE_JSON = "boolean"; 95 | 96 | public static final String MKDIRS_JSON = "boolean"; 97 | 98 | public static final String HOME_DIR_JSON = "Path"; 99 | 100 | public static final String SET_REPLICATION_JSON = "boolean"; 101 | 102 | public static final String UPLOAD_CONTENT_TYPE = "application/octet-stream"; 103 | 104 | public static enum FILE_TYPE { 105 | FILE, DIRECTORY, SYMLINK; 106 | 107 | public static FILE_TYPE getType(FileStatus fileStatus) { 108 | if (!fileStatus.isDir()) { 109 | return FILE; 110 | } 111 | if (fileStatus.isDir()) { 112 | return DIRECTORY; 113 | } 114 | throw new IllegalArgumentException( 115 | "Could not determine 
filetype for: " + fileStatus.getPath()); 116 | } 117 | } 118 | 119 | public static final String FILE_STATUSES_JSON = "FileStatuses"; 120 | public static final String FILE_STATUS_JSON = "FileStatus"; 121 | public static final String PATH_SUFFIX_JSON = "pathSuffix"; 122 | public static final String TYPE_JSON = "type"; 123 | public static final String LENGTH_JSON = "length"; 124 | public static final String OWNER_JSON = "owner"; 125 | public static final String GROUP_JSON = "group"; 126 | public static final String PERMISSION_JSON = "permission"; 127 | public static final String ACCESS_TIME_JSON = "accessTime"; 128 | public static final String MODIFICATION_TIME_JSON = "modificationTime"; 129 | public static final String BLOCK_SIZE_JSON = "blockSize"; 130 | public static final String REPLICATION_JSON = "replication"; 131 | 132 | public static final String FILE_CHECKSUM_JSON = "FileChecksum"; 133 | public static final String CHECKSUM_ALGORITHM_JSON = "algorithm"; 134 | public static final String CHECKSUM_BYTES_JSON = "bytes"; 135 | public static final String CHECKSUM_LENGTH_JSON = "length"; 136 | 137 | public static final String CONTENT_SUMMARY_JSON = "ContentSummary"; 138 | public static final String CONTENT_SUMMARY_DIRECTORY_COUNT_JSON = "directoryCount"; 139 | public static final String CONTENT_SUMMARY_FILE_COUNT_JSON = "fileCount"; 140 | public static final String CONTENT_SUMMARY_LENGTH_JSON = "length"; 141 | public static final String CONTENT_SUMMARY_QUOTA_JSON = "quota"; 142 | public static final String CONTENT_SUMMARY_SPACE_CONSUMED_JSON = "spaceConsumed"; 143 | public static final String CONTENT_SUMMARY_SPACE_QUOTA_JSON = "spaceQuota"; 144 | 145 | public static final String DELEGATION_TOKEN_JSON = "Token"; 146 | public static final String DELEGATION_TOKEN_URL_STRING_JSON = "urlString"; 147 | 148 | public static final String ERROR_JSON = "RemoteException"; 149 | public static final String ERROR_EXCEPTION_JSON = "exception"; 150 | public static final String 
ERROR_CLASSNAME_JSON = "javaClassName"; 151 | public static final String ERROR_MESSAGE_JSON = "message"; 152 | 153 | public static final int HTTP_TEMPORARY_REDIRECT = 307; 154 | 155 | private static final String HTTP_GET = "GET"; 156 | private static final String HTTP_PUT = "PUT"; 157 | private static final String HTTP_POST = "POST"; 158 | private static final String HTTP_DELETE = "DELETE"; 159 | 160 | public enum Operation { 161 | OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET), GETHOMEDIRECTORY( 162 | HTTP_GET), GETCONTENTSUMMARY(HTTP_GET), GETFILECHECKSUM( 163 | HTTP_GET), GETFILEBLOCKLOCATIONS(HTTP_GET), INSTRUMENTATION( 164 | HTTP_GET), APPEND(HTTP_POST), CREATE(HTTP_PUT), MKDIRS(HTTP_PUT), RENAME( 165 | HTTP_PUT), SETOWNER(HTTP_PUT), SETPERMISSION(HTTP_PUT), SETREPLICATION( 166 | HTTP_PUT), SETTIMES(HTTP_PUT), DELETE(HTTP_DELETE); 167 | 168 | private String httpMethod; 169 | 170 | Operation(String httpMethod) { 171 | this.httpMethod = httpMethod; 172 | } 173 | 174 | public String getMethod() { 175 | return httpMethod; 176 | } 177 | 178 | } 179 | 180 | private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token(); 181 | private URI uri; 182 | private Path workingDir; 183 | private String doAs; 184 | 185 | /** 186 | * Convenience method that creates a HttpURLConnection for the 187 | * HttpFSServer file system operations. 188 | *

189 | * This methods performs and injects any needed authentication credentials 190 | * via the {@link #getConnection(URL, String)} method 191 | * 192 | * @param method 193 | * the HTTP method. 194 | * @param params 195 | * the query string parameters. 196 | * @param path 197 | * the file path 198 | * @param makeQualified 199 | * if the path should be 'makeQualified' 200 | * 201 | * @return a HttpURLConnection for the HttpFSServer server, 202 | * authenticated and ready to use for the specified path and file 203 | * system operation. 204 | * 205 | * @throws IOException 206 | * thrown if an IO error occurrs. 207 | */ 208 | private HttpURLConnection getConnection(String method, 209 | Map params, Path path, boolean makeQualified) 210 | throws IOException { 211 | // params.put(DO_AS_PARAM, doAs); 212 | //todo 需要修改 213 | params.put("user.name", doAs); 214 | if (makeQualified) { 215 | path = makeQualified(path); 216 | } 217 | URI uri = path.toUri(); 218 | StringBuilder sb = new StringBuilder(); 219 | sb.append(uri.getScheme()).append("://").append(uri.getAuthority()) 220 | .append(SERVICE_PREFIX).append(uri.getPath()); 221 | 222 | String separator = "?"; 223 | for (Map.Entry entry : params.entrySet()) { 224 | sb.append(separator).append(entry.getKey()).append("=") 225 | .append(URLEncoder.encode(entry.getValue(), "UTF8")); 226 | separator = "&"; 227 | } 228 | URL url = new URL(sb.toString()); 229 | return getConnection(url, method); 230 | } 231 | 232 | /** 233 | * Convenience method that creates a HttpURLConnection for the 234 | * specified URL. 235 | *

236 | * This methods performs and injects any needed authentication credentials. 237 | * 238 | * @param url 239 | * url to connect to. 240 | * @param method 241 | * the HTTP method. 242 | * 243 | * @return a HttpURLConnection for the HttpFSServer server, 244 | * authenticated and ready to use for the specified path and file 245 | * system operation. 246 | * 247 | * @throws IOException 248 | * thrown if an IO error occurrs. 249 | */ 250 | private HttpURLConnection getConnection(URL url, String method) 251 | throws IOException { 252 | Class klass = getConf().getClass( 253 | "httpfs.authenticator.class", HttpKerberosAuthenticator.class, 254 | Authenticator.class); 255 | Authenticator authenticator = ReflectionUtils.newInstance(klass, 256 | getConf()); 257 | try { 258 | HttpURLConnection conn = new AuthenticatedURL(authenticator) 259 | .openConnection(url, authToken); 260 | conn.setRequestMethod(method); 261 | if (method.equals(HTTP_POST) || method.equals(HTTP_PUT)) { 262 | conn.setDoOutput(true); 263 | } 264 | return conn; 265 | } catch (Exception ex) { 266 | throw new IOException(ex); 267 | } 268 | } 269 | 270 | /** 271 | * Convenience method that JSON Parses the InputStream of a 272 | * HttpURLConnection. 273 | * 274 | * @param conn 275 | * the HttpURLConnection. 276 | * 277 | * @return the parsed JSON object. 278 | * 279 | * @throws IOException 280 | * thrown if the InputStream could not be JSON 281 | * parsed. 282 | */ 283 | private static Object jsonParse(HttpURLConnection conn) throws IOException { 284 | try { 285 | JSONParser parser = new JSONParser(); 286 | return parser.parse(new InputStreamReader(conn.getInputStream())); 287 | } catch (ParseException ex) { 288 | throw new IOException("JSON parser error, " + ex.getMessage(), ex); 289 | } 290 | } 291 | 292 | /** 293 | * Validates the status of an HttpURLConnection against an 294 | * expected HTTP status code. 
If the current status code is not the expected 295 | * one it throws an exception with a detail message using Server side error 296 | * messages if available. 297 | * 298 | * @param conn 299 | * the HttpURLConnection. 300 | * @param expected 301 | * the expected HTTP status code. 302 | * 303 | * @throws IOException 304 | * thrown if the current status code does not match the expected 305 | * one. 306 | */ 307 | private static void validateResponse(HttpURLConnection conn, int expected) 308 | throws IOException { 309 | int status = conn.getResponseCode(); 310 | if (status != expected) { 311 | try { 312 | JSONObject json = (JSONObject) jsonParse(conn); 313 | json = (JSONObject) json.get(ERROR_JSON); 314 | String message = (String) json.get(ERROR_MESSAGE_JSON); 315 | String exception = (String) json.get(ERROR_EXCEPTION_JSON); 316 | String className = (String) json.get(ERROR_CLASSNAME_JSON); 317 | 318 | try { 319 | ClassLoader cl = HttpFSFileSystem.class.getClassLoader(); 320 | Class klass = cl.loadClass(className); 321 | Constructor constr = klass.getConstructor(String.class); 322 | throw (IOException) constr.newInstance(message); 323 | } catch (IOException ex) { 324 | throw ex; 325 | } catch (Exception ex) { 326 | throw new IOException(MessageFormat.format("{0} - {1}", 327 | exception, message)); 328 | } 329 | } catch (IOException ex) { 330 | if (ex.getCause() instanceof IOException) { 331 | throw (IOException) ex.getCause(); 332 | } 333 | throw new IOException(MessageFormat.format( 334 | "HTTP status [{0}], {1}", status, 335 | conn.getResponseMessage())); 336 | } 337 | } 338 | } 339 | 340 | /** 341 | * Called after a new FileSystem instance is constructed. 342 | * 343 | * @param name 344 | * a uri whose authority section names the host, port, etc. 
for 345 | * this FileSystem 346 | * @param conf 347 | * the configuration 348 | */ 349 | @Override 350 | public void initialize(URI name, Configuration conf) throws IOException { 351 | UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); 352 | doAs = ugi.getUserName(); 353 | //todo 需要修改 354 | doAs="hdfs"; 355 | super.initialize(name, conf); 356 | try { 357 | uri = new URI(name.getScheme() + "://" + name.getHost() + ":" 358 | + name.getPort()); 359 | } catch (URISyntaxException ex) { 360 | throw new IOException(ex); 361 | } 362 | } 363 | 364 | /** 365 | * Returns a URI whose scheme and authority identify this FileSystem. 366 | * 367 | * @return the URI whose scheme and authority identify this FileSystem. 368 | */ 369 | @Override 370 | public URI getUri() { 371 | return uri; 372 | } 373 | 374 | /** 375 | * HttpFSServer subclass of the FSDataInputStream. 376 | *

377 | * This implementation does not support the PositionReadable 378 | * and Seekable methods. 379 | */ 380 | private static class HttpFSDataInputStream extends FilterInputStream 381 | implements Seekable, PositionedReadable { 382 | 383 | protected HttpFSDataInputStream(InputStream in, int bufferSize) { 384 | super(new BufferedInputStream(in, bufferSize)); 385 | } 386 | 387 | @Override 388 | public int read(long position, byte[] buffer, int offset, int length) 389 | throws IOException { 390 | throw new UnsupportedOperationException(); 391 | } 392 | 393 | @Override 394 | public void readFully(long position, byte[] buffer, int offset, 395 | int length) throws IOException { 396 | throw new UnsupportedOperationException(); 397 | } 398 | 399 | @Override 400 | public void readFully(long position, byte[] buffer) throws IOException { 401 | throw new UnsupportedOperationException(); 402 | } 403 | 404 | @Override 405 | public void seek(long pos) throws IOException { 406 | throw new UnsupportedOperationException(); 407 | } 408 | 409 | @Override 410 | public long getPos() throws IOException { 411 | throw new UnsupportedOperationException(); 412 | } 413 | 414 | @Override 415 | public boolean seekToNewSource(long targetPos) throws IOException { 416 | throw new UnsupportedOperationException(); 417 | } 418 | } 419 | 420 | /** 421 | * Opens an FSDataInputStream at the indicated Path.

IMPORTANT: the 422 | * returned does not support the 423 | * PositionReadable and Seekable methods. 424 | * 425 | * @param f 426 | * the file name to open 427 | * @param bufferSize 428 | * the size of the buffer to be used. 429 | */ 430 | @Override 431 | public FSDataInputStream open(Path f, int bufferSize) throws IOException { 432 | Map params = new HashMap(); 433 | params.put(OP_PARAM, Operation.OPEN.toString()); 434 | HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(), 435 | params, f, true); 436 | validateResponse(conn, HttpURLConnection.HTTP_OK); 437 | return new FSDataInputStream(new HttpFSDataInputStream( 438 | conn.getInputStream(), bufferSize)); 439 | } 440 | 441 | /** 442 | * HttpFSServer subclass of the FSDataOutputStream. 443 | *

 * This implementation closes the underlying HTTP connection validating the
 * Http connection status at closing time.
 */
private static class HttpFSDataOutputStream extends FSDataOutputStream {
    private HttpURLConnection conn;
    private int closeStatus;

    public HttpFSDataOutputStream(HttpURLConnection conn, OutputStream out,
            int closeStatus, Statistics stats) throws IOException {
        super(out, stats);
        this.conn = conn;
        this.closeStatus = closeStatus;
    }

    @Override
    public void close() throws IOException {
        try {
            super.close();
        } finally {
            // Check the server's final response even when closing the
            // underlying stream failed, so an HTTP-level error is never
            // silently dropped.
            validateResponse(conn, closeStatus);
        }
    }

}

/**
 * Converts a FsPermission to a Unix octal representation.
 *
 * @param p
 *            the permission.
 *
 * @return the Unix string symbolic representation.
 */
public static String permissionToString(FsPermission p) {
    return Integer.toString((p == null) ? DEFAULT_PERMISSION : p.toShort(),
            8);
}

/*
 * Common handling for uploading data for create and append operations.
 *
 * The server answers the initial request with a 307 Temporary Redirect
 * whose Location header names the URL that actually accepts the data; a
 * second connection is opened against that URL and its output stream is
 * returned to the caller wrapped in HttpFSDataOutputStream.
 */
private FSDataOutputStream uploadData(String method, Path f,
        Map params, int bufferSize, int expectedStatus)
        throws IOException {
    HttpURLConnection conn = getConnection(method, params, f, true);
    // Redirects are followed manually so the Location header can be read.
    conn.setInstanceFollowRedirects(false);
    boolean exceptionAlreadyHandled = false;
    try {
        if (conn.getResponseCode() == HTTP_TEMPORARY_REDIRECT) {
            exceptionAlreadyHandled = true;
            String location = conn.getHeaderField("Location");
            if (location != null) {
                // Second hop: send the actual payload to the redirect URL.
                conn = getConnection(new URL(location), method);
                conn.setRequestProperty("Content-Type", UPLOAD_CONTENT_TYPE);
                try {
                    OutputStream os = new BufferedOutputStream(
                            conn.getOutputStream(), bufferSize);
                    return new HttpFSDataOutputStream(conn, os,
                            expectedStatus, statistics);
                } catch (IOException ex) {
                    // Prefer the server-reported error (if any) over the
                    // local I/O failure.
                    validateResponse(conn, expectedStatus);
                    throw ex;
                }
            } else {
                validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
                throw new IOException(
                        "Missing HTTP 'Location' header for ["
                                + conn.getURL() + "]");
            }
        } else {
            throw new IOException(MessageFormat.format(
                    "Expected HTTP status was [307], received [{0}]",
                    conn.getResponseCode()));
        }
    } catch (IOException ex) {
        if (exceptionAlreadyHandled) {
            throw ex;
        } else {
            // The failure happened before the redirect was examined; let
            // validateResponse surface the server-side error if there is one.
            validateResponse(conn, HTTP_TEMPORARY_REDIRECT);
            throw ex;
        }
    }
}

/**
 * Opens an FSDataOutputStream at the indicated Path with write-progress
 * reporting.
 *

532 | * IMPORTANT: The Progressable parameter is not used. 533 | * 534 | * @param f 535 | * the file name to open. 536 | * @param permission 537 | * file permission. 538 | * @param overwrite 539 | * if a file with this name already exists, then if true, the 540 | * file will be overwritten, and if false an error will be 541 | * thrown. 542 | * @param bufferSize 543 | * the size of the buffer to be used. 544 | * @param replication 545 | * required block replication for the file. 546 | * @param blockSize 547 | * block size. 548 | * @param progress 549 | * progressable. 550 | * 551 | * @throws IOException 552 | * @see #setPermission(Path, FsPermission) 553 | */ 554 | @Override 555 | public FSDataOutputStream create(Path f, FsPermission permission, 556 | boolean overwrite, int bufferSize, short replication, 557 | long blockSize, Progressable progress) throws IOException { 558 | Map params = new HashMap(); 559 | params.put(OP_PARAM, Operation.CREATE.toString()); 560 | params.put(OVERWRITE_PARAM, Boolean.toString(overwrite)); 561 | params.put(REPLICATION_PARAM, Short.toString(replication)); 562 | params.put(BLOCKSIZE_PARAM, Long.toString(blockSize)); 563 | params.put(PERMISSION_PARAM, permissionToString(permission)); 564 | return uploadData(Operation.CREATE.getMethod(), f, params, bufferSize, 565 | HttpURLConnection.HTTP_CREATED); 566 | } 567 | 568 | /** 569 | * Append to an existing file (optional operation). 570 | *

571 | * IMPORTANT: The Progressable parameter is not used. 572 | * 573 | * @param f 574 | * the existing file to be appended. 575 | * @param bufferSize 576 | * the size of the buffer to be used. 577 | * @param progress 578 | * for reporting progress if it is not null. 579 | * 580 | * @throws IOException 581 | */ 582 | @Override 583 | public FSDataOutputStream append(Path f, int bufferSize, 584 | Progressable progress) throws IOException { 585 | Map params = new HashMap(); 586 | params.put(OP_PARAM, Operation.APPEND.toString()); 587 | return uploadData(Operation.APPEND.getMethod(), f, params, bufferSize, 588 | HttpURLConnection.HTTP_OK); 589 | } 590 | 591 | /** 592 | * Renames Path src to Path dst. Can take place on local fs or remote DFS. 593 | */ 594 | @Override 595 | public boolean rename(Path src, Path dst) throws IOException { 596 | Map params = new HashMap(); 597 | params.put(OP_PARAM, Operation.RENAME.toString()); 598 | params.put(DESTINATION_PARAM, dst.toString()); 599 | HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(), 600 | params, src, true); 601 | validateResponse(conn, HttpURLConnection.HTTP_OK); 602 | JSONObject json = (JSONObject) jsonParse(conn); 603 | return (Boolean) json.get(RENAME_JSON); 604 | } 605 | 606 | /** 607 | * Delete a file. 608 | * 609 | * @deprecated Use delete(Path, boolean) instead 610 | */ 611 | @SuppressWarnings({ "deprecation" }) 612 | @Deprecated 613 | @Override 614 | public boolean delete(Path f) throws IOException { 615 | return delete(f, false); 616 | } 617 | 618 | /** 619 | * Delete a file. 620 | * 621 | * @param f 622 | * the path to delete. 623 | * @param recursive 624 | * if path is a directory and set to true, the directory is 625 | * deleted else throws an exception. In case of a file the 626 | * recursive can be set to either true or false. 627 | * 628 | * @return true if delete is successful else false. 
629 | * 630 | * @throws IOException 631 | */ 632 | @Override 633 | public boolean delete(Path f, boolean recursive) throws IOException { 634 | Map params = new HashMap(); 635 | params.put(OP_PARAM, Operation.DELETE.toString()); 636 | params.put(RECURSIVE_PARAM, Boolean.toString(recursive)); 637 | HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(), 638 | params, f, true); 639 | validateResponse(conn, HttpURLConnection.HTTP_OK); 640 | JSONObject json = (JSONObject) jsonParse(conn); 641 | return (Boolean) json.get(DELETE_JSON); 642 | } 643 | 644 | /** 645 | * List the statuses of the files/directories in the given path if the path 646 | * is a directory. 647 | * 648 | * @param f 649 | * given path 650 | * 651 | * @return the statuses of the files/directories in the given patch 652 | * 653 | * @throws IOException 654 | */ 655 | @Override 656 | public FileStatus[] listStatus(Path f) throws IOException { 657 | Map params = new HashMap(); 658 | params.put(OP_PARAM, Operation.LISTSTATUS.toString()); 659 | HttpURLConnection conn = getConnection( 660 | Operation.LISTSTATUS.getMethod(), params, f, true); 661 | validateResponse(conn, HttpURLConnection.HTTP_OK); 662 | JSONObject json = (JSONObject) jsonParse(conn); 663 | json = (JSONObject) json.get(FILE_STATUSES_JSON); 664 | JSONArray jsonArray = (JSONArray) json.get(FILE_STATUS_JSON); 665 | FileStatus[] array = new FileStatus[jsonArray.size()]; 666 | f = makeQualified(f); 667 | for (int i = 0; i < jsonArray.size(); i++) { 668 | array[i] = createFileStatus(f, (JSONObject) jsonArray.get(i)); 669 | } 670 | return array; 671 | } 672 | 673 | /** 674 | * Set the current working directory for the given file system. All relative 675 | * paths will be resolved relative to it. 676 | * 677 | * @param newDir 678 | * new directory. 
679 | */ 680 | @Override 681 | public void setWorkingDirectory(Path newDir) { 682 | workingDir = newDir; 683 | } 684 | 685 | /** 686 | * Get the current working directory for the given file system 687 | * 688 | * @return the directory pathname 689 | */ 690 | @Override 691 | public Path getWorkingDirectory() { 692 | if (workingDir == null) { 693 | workingDir = getHomeDirectory(); 694 | } 695 | return workingDir; 696 | } 697 | 698 | /** 699 | * Make the given file and all non-existent parents into directories. Has 700 | * the semantics of Unix 'mkdir -p'. Existence of the directory hierarchy is 701 | * not an error. 702 | */ 703 | @Override 704 | public boolean mkdirs(Path f, FsPermission permission) throws IOException { 705 | Map params = new HashMap(); 706 | params.put(OP_PARAM, Operation.MKDIRS.toString()); 707 | params.put(PERMISSION_PARAM, permissionToString(permission)); 708 | HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(), 709 | params, f, true); 710 | validateResponse(conn, HttpURLConnection.HTTP_OK); 711 | JSONObject json = (JSONObject) jsonParse(conn); 712 | return (Boolean) json.get(MKDIRS_JSON); 713 | } 714 | 715 | /** 716 | * Return a file status object that represents the path. 
717 | * 718 | * @param f 719 | * The path we want information from 720 | * 721 | * @return a FileStatus object 722 | * 723 | * @throws FileNotFoundException 724 | * when the path does not exist; IOException see specific 725 | * implementation 726 | */ 727 | @Override 728 | public FileStatus getFileStatus(Path f) throws IOException { 729 | Map params = new HashMap(); 730 | params.put(OP_PARAM, Operation.GETFILESTATUS.toString()); 731 | HttpURLConnection conn = getConnection( 732 | Operation.GETFILESTATUS.getMethod(), params, f, true); 733 | validateResponse(conn, HttpURLConnection.HTTP_OK); 734 | JSONObject json = (JSONObject) jsonParse(conn); 735 | json = (JSONObject) json.get(FILE_STATUS_JSON); 736 | f = makeQualified(f); 737 | return createFileStatus(f, json); 738 | } 739 | 740 | /** 741 | * Return the current user's home directory in this filesystem. The default 742 | * implementation returns "/user/$USER/". 743 | */ 744 | @Override 745 | public Path getHomeDirectory() { 746 | Map params = new HashMap(); 747 | params.put(OP_PARAM, Operation.GETHOMEDIRECTORY.toString()); 748 | try { 749 | HttpURLConnection conn = getConnection( 750 | Operation.GETHOMEDIRECTORY.getMethod(), params, new Path( 751 | getUri().toString(), "/"), false); 752 | validateResponse(conn, HttpURLConnection.HTTP_OK); 753 | JSONObject json = (JSONObject) jsonParse(conn); 754 | return new Path((String) json.get(HOME_DIR_JSON)); 755 | } catch (IOException ex) { 756 | throw new RuntimeException(ex); 757 | } 758 | } 759 | 760 | /** 761 | * Set owner of a path (i.e. a file or a directory). The parameters username 762 | * and groupname cannot both be null. 763 | * 764 | * @param p 765 | * The path 766 | * @param username 767 | * If it is null, the original username remains unchanged. 768 | * @param groupname 769 | * If it is null, the original groupname remains unchanged. 
770 | */ 771 | @Override 772 | public void setOwner(Path p, String username, String groupname) 773 | throws IOException { 774 | Map params = new HashMap(); 775 | params.put(OP_PARAM, Operation.SETOWNER.toString()); 776 | params.put(OWNER_PARAM, username); 777 | params.put(GROUP_PARAM, groupname); 778 | HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(), 779 | params, p, true); 780 | validateResponse(conn, HttpURLConnection.HTTP_OK); 781 | } 782 | 783 | /** 784 | * Set permission of a path. 785 | * 786 | * @param p 787 | * path. 788 | * @param permission 789 | * permission. 790 | */ 791 | @Override 792 | public void setPermission(Path p, FsPermission permission) 793 | throws IOException { 794 | Map params = new HashMap(); 795 | params.put(OP_PARAM, Operation.SETPERMISSION.toString()); 796 | params.put(PERMISSION_PARAM, permissionToString(permission)); 797 | HttpURLConnection conn = getConnection( 798 | Operation.SETPERMISSION.getMethod(), params, p, true); 799 | validateResponse(conn, HttpURLConnection.HTTP_OK); 800 | } 801 | 802 | /** 803 | * Set access time of a file 804 | * 805 | * @param p 806 | * The path 807 | * @param mtime 808 | * Set the modification time of this file. The number of 809 | * milliseconds since Jan 1, 1970. A value of -1 means that this 810 | * call should not set modification time. 811 | * @param atime 812 | * Set the access time of this file. The number of milliseconds 813 | * since Jan 1, 1970. A value of -1 means that this call should 814 | * not set access time. 
815 | */ 816 | @Override 817 | public void setTimes(Path p, long mtime, long atime) throws IOException { 818 | Map params = new HashMap(); 819 | params.put(OP_PARAM, Operation.SETTIMES.toString()); 820 | params.put(MODIFICATION_TIME_PARAM, Long.toString(mtime)); 821 | params.put(ACCESS_TIME_PARAM, Long.toString(atime)); 822 | HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(), 823 | params, p, true); 824 | validateResponse(conn, HttpURLConnection.HTTP_OK); 825 | } 826 | 827 | /** 828 | * Set replication for an existing file. 829 | * 830 | * @param src 831 | * file name 832 | * @param replication 833 | * new replication 834 | * 835 | * @return true if successful; false if file does not exist or is a 836 | * directory 837 | * 838 | * @throws IOException 839 | */ 840 | @Override 841 | public boolean setReplication(Path src, short replication) 842 | throws IOException { 843 | Map params = new HashMap(); 844 | params.put(OP_PARAM, Operation.SETREPLICATION.toString()); 845 | params.put(REPLICATION_PARAM, Short.toString(replication)); 846 | HttpURLConnection conn = getConnection( 847 | Operation.SETREPLICATION.getMethod(), params, src, true); 848 | validateResponse(conn, HttpURLConnection.HTTP_OK); 849 | JSONObject json = (JSONObject) jsonParse(conn); 850 | return (Boolean) json.get(SET_REPLICATION_JSON); 851 | } 852 | 853 | /** 854 | * Creates a FileStatus object using a JSON file-status payload 855 | * received from a HttpFSServer server. 856 | * 857 | * @param json 858 | * a JSON file-status payload received from a HttpFSServer server 859 | * 860 | * @return the corresponding FileStatus 861 | */ 862 | private FileStatus createFileStatus(Path parent, JSONObject json) { 863 | String pathSuffix = (String) json.get(PATH_SUFFIX_JSON); 864 | Path path = (pathSuffix.equals("")) ? 
parent : new Path(parent, 865 | pathSuffix); 866 | FILE_TYPE type = FILE_TYPE.valueOf((String) json.get(TYPE_JSON)); 867 | long len = (Long) json.get(LENGTH_JSON); 868 | String owner = (String) json.get(OWNER_JSON); 869 | String group = (String) json.get(GROUP_JSON); 870 | FsPermission permission = new FsPermission(Short.parseShort( 871 | (String) json.get(PERMISSION_JSON), 8)); 872 | long aTime = (Long) json.get(ACCESS_TIME_JSON); 873 | long mTime = (Long) json.get(MODIFICATION_TIME_JSON); 874 | long blockSize = (Long) json.get(BLOCK_SIZE_JSON); 875 | short replication = ((Long) json.get(REPLICATION_JSON)).shortValue(); 876 | FileStatus fileStatus = null; 877 | 878 | switch (type) { 879 | case FILE: 880 | case DIRECTORY: 881 | fileStatus = new FileStatus(len, (type == FILE_TYPE.DIRECTORY), 882 | replication, blockSize, mTime, aTime, permission, owner, 883 | group, path); 884 | break; 885 | case SYMLINK: 886 | throw new IllegalArgumentException( 887 | "SYMLINKs are not supported in cdh3 : " 888 | + fileStatus.getPath()); 889 | } 890 | return fileStatus; 891 | } 892 | 893 | @Override 894 | public ContentSummary getContentSummary(Path f) throws IOException { 895 | Map params = new HashMap(); 896 | params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString()); 897 | HttpURLConnection conn = getConnection( 898 | Operation.GETCONTENTSUMMARY.getMethod(), params, f, true); 899 | validateResponse(conn, HttpURLConnection.HTTP_OK); 900 | JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)) 901 | .get(CONTENT_SUMMARY_JSON); 902 | return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON), 903 | (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON), 904 | (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON), 905 | (Long) json.get(CONTENT_SUMMARY_QUOTA_JSON), 906 | (Long) json.get(CONTENT_SUMMARY_SPACE_CONSUMED_JSON), 907 | (Long) json.get(CONTENT_SUMMARY_SPACE_QUOTA_JSON)); 908 | } 909 | 910 | @Override 911 | public FileChecksum getFileChecksum(Path f) throws 
IOException { 912 | Map params = new HashMap(); 913 | params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString()); 914 | HttpURLConnection conn = getConnection( 915 | Operation.GETFILECHECKSUM.getMethod(), params, f, true); 916 | validateResponse(conn, HttpURLConnection.HTTP_OK); 917 | final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)) 918 | .get(FILE_CHECKSUM_JSON); 919 | return new FileChecksum() { 920 | @Override 921 | public String getAlgorithmName() { 922 | return (String) json.get(CHECKSUM_ALGORITHM_JSON); 923 | } 924 | 925 | @Override 926 | public int getLength() { 927 | return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue(); 928 | } 929 | 930 | @Override 931 | public byte[] getBytes() { 932 | return StringUtils.hexStringToByte((String) json 933 | .get(CHECKSUM_BYTES_JSON)); 934 | } 935 | 936 | @Override 937 | public void write(DataOutput out) throws IOException { 938 | throw new UnsupportedOperationException(); 939 | } 940 | 941 | @Override 942 | public void readFields(DataInput in) throws IOException { 943 | throw new UnsupportedOperationException(); 944 | } 945 | }; 946 | } 947 | 948 | } -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/http/client/HttpKerberosAuthenticator.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | package org.apache.hadoop.fs.http.client; 19 | 20 | import org.apache.hadoop.security.authentication.client.Authenticator; 21 | import org.apache.hadoop.security.authentication.client.KerberosAuthenticator; 22 | 23 | /** 24 | * A KerberosAuthenticator subclass that fallback to 25 | * {@link HttpPseudoAuthenticator}. 26 | */ 27 | public class HttpKerberosAuthenticator extends KerberosAuthenticator { 28 | 29 | /** 30 | * Returns the fallback authenticator if the server does not use 31 | * Kerberos SPNEGO HTTP authentication. 32 | * 33 | * @return a {@link HttpPseudoAuthenticator} instance. 34 | */ 35 | @Override 36 | protected Authenticator getFallBackAuthenticator() { 37 | return new HttpPseudoAuthenticator(); 38 | } 39 | } -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/http/client/HttpPseudoAuthenticator.java: -------------------------------------------------------------------------------- 1 | /** 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | package org.apache.hadoop.fs.http.client; 19 | 20 | import java.io.IOException; 21 | 22 | import org.apache.hadoop.security.UserGroupInformation; 23 | import org.apache.hadoop.security.authentication.client.PseudoAuthenticator; 24 | 25 | /** 26 | * A PseudoAuthenticator subclass that uses FileSystemAccess's 27 | * UserGroupInformation to obtain the client user name (the UGI's login user). 28 | */ 29 | public class HttpPseudoAuthenticator extends PseudoAuthenticator { 30 | 31 | /** 32 | * Return the client user name. 33 | * 34 | * @return the client user name. 
35 | */ 36 | @Override 37 | protected String getUserName() { 38 | try { 39 | return UserGroupInformation.getLoginUser().getUserName(); 40 | } catch (IOException ex) { 41 | throw new SecurityException("Could not obtain current user, " + ex.getMessage(), ex); 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main/resources/httpfs.properties: -------------------------------------------------------------------------------- 1 | #httpfs server host ip 2 | host=192.168.230.129 3 | #httpfs server port 4 | port=14000 -------------------------------------------------------------------------------- /src/test/java/com/catt/httpfs/client/httpclient/Demo.java: -------------------------------------------------------------------------------- 1 | package com.catt.httpfs.client.httpclient; 2 | 3 | public class Demo { 4 | public static void main(String[] args) { 5 | HttpFSClient client = new HttpFSClient(); 6 | client.initCookie(); 7 | 8 | // 获取当前用户的目录 9 | client.get("", "op=gethomedirectory"); 10 | 11 | // // 上传文件 12 | // client.put("/test", "op=CREATE&buffersize=1000"); 13 | // client.upload("/test/pom.xml", "op=CREATE&buffersize=1000&data=true", 14 | // "pom.xml"); 15 | // 16 | // // 删除文件 17 | // client.delete("/test2/demo.xml", "op=DELETE"); 18 | // 19 | // // 创建目录 20 | // client.put("/test2/test9", "op=MKDIRS"); 21 | // 22 | // // 读取文件 23 | // client.get("/test/data.txt", 24 | // "op=OPEN&buffersize=10000&data=true",true); 25 | 26 | // 获取文件列表信息 27 | String result = client.get("/test", "op=LISTSTATUS"); 28 | System.out.println(result); 29 | // 处理返回信息 30 | HttpFSUtils.parseResult(result); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/fs/http/client/Demo.java: -------------------------------------------------------------------------------- 1 | package org.apache.hadoop.fs.http.client; 2 | 3 | import java.net.URL; 4 | 5 | import 
org.apache.hadoop.conf.Configuration; 6 | import org.apache.hadoop.fs.FileSystem; 7 | import org.apache.hadoop.fs.Path; 8 | 9 | import com.catt.httpfs.client.utils.HttpFSConf; 10 | 11 | public class Demo { 12 | public static void main(String[] args) throws Exception { 13 | Configuration conf = new Configuration(); 14 | conf.set("fs.defaultFS", "hdfs://localhost.localdomain:8020/"); 15 | 16 | Path path = new Path("/test2/test3"); 17 | FileSystem fs = getHttpFileSystem(); 18 | fs.mkdirs(path); 19 | fs.close(); 20 | fs = FileSystem.get(conf); 21 | System.out.println(fs.exists(path)); 22 | fs.close(); 23 | } 24 | 25 | protected static FileSystem getHttpFileSystem() throws Exception { 26 | Configuration conf = new Configuration(); 27 | conf.set("fs.http.impl", HttpFSFileSystem.class.getName()); 28 | String url = "http://" + HttpFSConf.getHOST() + ":" 29 | + HttpFSConf.getPORT(); 30 | return FileSystem.get(new URL(url).toURI(), conf); 31 | } 32 | } 33 | --------------------------------------------------------------------------------