├── .github └── workflows │ └── maven.yml ├── .gitignore ├── BUILDING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── pom.xml └── src ├── main ├── java │ ├── bandwidth.java │ ├── bucketmetadata.java │ ├── bucketstate.java │ ├── bulkdelete.java │ ├── cloudup.java │ ├── committerinfo.java │ ├── constval.java │ ├── deleteobject.java │ ├── distcpdiag.java │ ├── dux.java │ ├── fetchdt.java │ ├── filestatus.java │ ├── gcscreds.java │ ├── help.java │ ├── iampolicy.java │ ├── jobtokens.java │ ├── list.java │ ├── listmultiparts.java │ ├── listobjects.java │ ├── listversions.java │ ├── localhost.java │ ├── locatefiles.java │ ├── mkbucket.java │ ├── mkcsv.java │ ├── org │ │ └── apache │ │ │ └── hadoop │ │ │ └── fs │ │ │ ├── gs │ │ │ ├── GsCredDiag.java │ │ │ ├── PemReader.java │ │ │ └── package-info.java │ │ │ ├── s3a │ │ │ ├── sdk │ │ │ │ ├── BucketMetadata.java │ │ │ │ ├── BulkDeleteCommand.java │ │ │ │ ├── CsvVersionWriter.java │ │ │ │ ├── DeleteObject.java │ │ │ │ ├── IamPolicy.java │ │ │ │ ├── InternalAccess.java │ │ │ │ ├── ListAndProcessVersionedObjects.java │ │ │ │ ├── ListMultiparts.java │ │ │ │ ├── ListObjects.java │ │ │ │ ├── ListVersions.java │ │ │ │ ├── MkBucket.java │ │ │ │ ├── MultipartProcessor.java │ │ │ │ ├── ObjectListingIterator.java │ │ │ │ ├── Regions.java │ │ │ │ ├── RestoreObject.java │ │ │ │ ├── S3ListingSupport.java │ │ │ │ ├── SessionKeys.java │ │ │ │ ├── SummaryProcessor.java │ │ │ │ ├── Undelete.java │ │ │ │ ├── VersionedFileCopier.java │ │ │ │ └── package-info.java │ │ │ └── sdk2 │ │ │ │ └── package-info.java │ │ │ ├── store │ │ │ ├── CheckStoreProperty.java │ │ │ ├── CommonParameters.java │ │ │ ├── MinMeanMax.java │ │ │ ├── PathCapabilityChecker.java │ │ │ ├── StoreDiagConstants.java │ │ │ ├── StoreDurationInfo.java │ │ │ ├── StoreEntryPoint.java │ │ │ ├── StoreExitCodes.java │ │ │ ├── StoreLambda.java │ │ │ ├── StoreUtils.java │ │ │ ├── abfs │ │ │ │ ├── SafePrefetch.java │ │ │ │ └── package-info.java │ │ │ ├── commands │ │ │ │ ├── Allocator.java │ │ │ │ ├── Bandwidth.java │ │ │ │ ├── BucketState.java │ │ │ │ ├── Command.java │ │ │ │ ├── CommitterInfo.java │ │ │ │ ├── Constval.java │ │ │ │ ├── DirectoryTree.java │ │ │ │ ├── EnvEntry.java │ │ │ │ ├── ExtendedDu.java │ │ │ │ ├── FetchTokens.java │ │ │ │ ├── JobTokens.java │ │ │ │ ├── ListFiles.java │ │ │ │ ├── LocalHost.java │ │ │ │ ├── LocateFiles.java │ │ │ │ ├── PathCapability.java │ │ │ │ ├── PrintStatus.java │ │ │ │ ├── TLSInfo.java │ │ │ │ ├── TarHardened.java │ │ │ │ └── package-info.java │ │ │ ├── diag │ │ │ │ ├── ADLDiagnosticsInfo.java │ │ │ │ ├── AbfsDiagnosticsInfo.java │ │ │ │ ├── CapabilityKeys.java │ │ │ │ ├── DiagUtils.java │ │ │ │ ├── DiagnosticsEntryPoint.java │ │ │ │ ├── DistcpDiag.java │ │ │ │ ├── GCSDiagnosticsInfo.java │ │ │ │ ├── HBossConstants.java │ │ │ │ ├── HDFSDiagnosticsInfo.java │ │ │ │ ├── OptionSets.java │ │ │ │ ├── Printout.java │ │ │ │ ├── S3ADiagnosticsInfo.java │ │ │ │ ├── StoreDiag.java │ │ │ │ ├── StoreDiagException.java │ │ │ │ ├── StoreDiagnosticsInfo.java │ │ │ │ ├── StoreLogExactlyOnce.java │ │ │ │ ├── TemplateDiagnosticsInfo.java │ │ │ │ ├── WasbDiagnosticsInfo.java │ │ │ │ └── package-info.java │ │ │ ├── logging │ │ │ │ ├── IOStatisticsIntegration.java │ │ │ │ ├── Log4JController.java │ │ │ │ ├── LogControl.java │ │ │ │ ├── LogControllerFactory.java │ │ │ │ └── package-info.java │ │ │ ├── s3a │ │ │ │ ├── DiagnosticsAWSCredentialsProvider.java │ │ │ │ ├── S3ASupport.java │ │ │ │ └── package-info.java │ │ │ ├── shim │ │ │ │ ├── APIShim.java │ │ │ │ ├── IsImplemented.java │ │ │ │ 
├── impl │ │ │ │ │ ├── AbstractAPIShim.java │ │ │ │ │ ├── Invocation.java │ │ │ │ │ ├── ShimReflectionSupport.java │ │ │ │ │ └── package-info.java │ │ │ │ └── package-info.java │ │ │ └── test │ │ │ │ ├── AbstractS3AStoreTest.java │ │ │ │ ├── S3AStoreContract.java │ │ │ │ └── package-info.java │ │ │ └── tools │ │ │ ├── cloudup │ │ │ ├── Cloudup.java │ │ │ ├── NanoTimer.java │ │ │ ├── UploadEntry.java │ │ │ └── package-info.java │ │ │ └── csv │ │ │ ├── CsvWriterWithCRC.java │ │ │ ├── MkCSV.java │ │ │ ├── SimpleCsvWriter.java │ │ │ └── package-info.java │ ├── pathcapability.java │ ├── regions.java │ ├── regions2.java │ ├── restore.java │ ├── safeprefetch.java │ ├── sessionkeys.java │ ├── storediag.java │ ├── tarhardened.java │ ├── tlsinfo.java │ └── undelete.java ├── resources │ ├── META-INF │ │ └── MANIFEST.MF │ └── cloudstore │ │ ├── diagnostics.xml │ │ ├── required.txt │ │ └── s3a.xml └── site │ ├── bandwidth.md │ ├── bucketmetadata.md │ ├── bulkdelete.md │ ├── cloudup.md │ ├── committerinfo.md │ ├── constval.md │ ├── diagnosticsawscredentialsprovider.md │ ├── fetchdt.md │ ├── locatefiles.md │ ├── mkbucket.md │ ├── mkcsv.md │ ├── safeprefetch.md │ ├── sdk.md │ ├── sessionkey.md │ ├── storediag.md │ ├── tarhardened.md │ └── versioned-objects.md └── test ├── java └── org │ └── apache │ └── hadoop │ ├── fs │ ├── s3a │ │ └── cloudup │ │ │ └── ITestS3ACloudup.java │ └── store │ │ ├── commands │ │ ├── FieldsForTesting.java │ │ └── TestConstval.java │ │ └── logging │ │ └── TestLog4JController.java │ └── tools │ ├── cloudup │ └── ITestLocalCloudup.java │ └── store │ ├── StoreTestUtils.java │ └── TestDiagUtils.java └── resources ├── core-site.xml ├── log4j.properties └── logback-test.xml /.github/workflows/maven.yml: -------------------------------------------------------------------------------- 1 | # This workflow will build a Java project with Maven 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven 3 | 4 | name: Java CI with Maven 5 | 6 | on: 7 | push: 8 | branches: [ trunk ] 9 | pull_request: 10 | branches: [ trunk ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - name: Set up JDK 1.8 20 | uses: actions/setup-java@v1 21 | with: 22 | java-version: 1.8 23 | - name: Build with Maven 24 | run: mvn clean install -Pextra && mvn install -Psdk2 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled class file 2 | *.class 3 | 4 | # Log file 5 | *.log 6 | 7 | # BlueJ files 8 | *.ctxt 9 | 10 | # Mobile Tools for Java (J2ME) 11 | .mtj.tmp/ 12 | 13 | # Package Files # 14 | *.jar 15 | *.war 16 | *.ear 17 | *.zip 18 | *.tar.gz 19 | *.rar 20 | 21 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 22 | hs_err_pid* 23 | target/ 24 | 25 | #idea 26 | .idea/ 27 | *.iml 28 | .DS_Store 29 | *.iml 30 | *.ipr 31 | *.iws 32 | build.properties 33 | atlassian-ide-plugin.xml 34 | auth-keys.xml 35 | .junit 36 | 37 | # secrets 38 | auth-keys.xml -------------------------------------------------------------------------------- /BUILDING.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Building 16 | 17 | With maven, with profiles for AWS java v1 and v2 SDK. 18 | 19 | To build a production release 20 | 1. Use java8 21 | 2. 
Compile against a shipping Hadoop version (see the profiles) 22 | 23 | Joint build: 24 | ```bash 25 | mvn clean install 26 | ``` 27 | 28 | ## Releasing 29 | 30 | To publish a release, use the GitHub web UI or the `gh` command line; the snippet below uses the `fish` shell. 31 | 32 | ```bash 33 | mvn clean install 34 | set -gx now (date '+%Y-%m-%d-%H.%M'); echo [$now] 35 | git add .; git status 36 | git commit -S --allow-empty -m "release $now"; git push 37 | gh release create tag-release-$now -t release-$now -n "release of $now" -d target/cloudstore-1.0.jar 38 | # then go to the web UI to review and finalize the release 39 | ``` 40 | 41 | * If a new release is made on the same day, remember to create a new tag. 42 | * The artifact is always named `cloudstore-1.0.jar`, not just from laziness but because it lets 43 | bash scripts always fetch the latest release through curl and then execute it, as sketched below.
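44 | 45 | A minimal fetch-and-run sketch of that pattern, assuming the jar is attached to the latest GitHub release (the standard `releases/latest/download` asset path) and that a `hadoop` command is on the PATH: 46 | 47 | ```bash 48 | # pull the jar attached to the most recent release, then run its help command 49 | curl -L -o cloudstore-1.0.jar \ 50 |   https://github.com/steveloughran/cloudstore/releases/latest/download/cloudstore-1.0.jar 51 | hadoop jar cloudstore-1.0.jar help 52 | ``` 53 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Security Policy 16 | 17 | ## Supported Versions 18 | 19 | None. You are on your own. Sorry. 20 | 21 | (this isn't quite true... so do ask: bugs will be fixed on a best-effort basis) 22 | 23 | ## Reporting a Vulnerability 24 | 25 | * file an issue 26 | * if you have a fix, file a PR 27 | * if the issue is in Hadoop, file an Apache JIRA. 28 | * if the issue is in a transitive dependency of Hadoop, see 29 | [Transitive Issues](https://steveloughran.blogspot.com/2022/08/transitive-issues.html) 30 | then solve the entire software-versioning problem in Java. Please. 31 | 32 | This library is actually written by Hadoop committers at Cloudera; 33 | if you are using Apache Hadoop, you are already running our code. 34 | 35 | The builds take place on our local machines, reading in all dependencies 36 | from our private Maven artifact server, the same one used for all 37 | Cloudera releases. 38 | 39 | The Maven binaries used are pulled directly from Apache, with 40 | their GPG signatures checked before installation. 41 | 42 | As an illustration of that check, a sketch follows; the Maven version and download paths are assumptions for the example, not a record of the actual process: 43 | 44 | ```bash 45 | # fetch a maven distribution, its detached signature, and the Apache KEYS file 46 | curl -LO https://downloads.apache.org/maven/maven-3/3.9.6/binaries/apache-maven-3.9.6-bin.tar.gz 47 | curl -LO https://downloads.apache.org/maven/maven-3/3.9.6/binaries/apache-maven-3.9.6-bin.tar.gz.asc 48 | curl -L https://downloads.apache.org/maven/KEYS | gpg --import 49 | # verify the tarball against its signature before unpacking 50 | gpg --verify apache-maven-3.9.6-bin.tar.gz.asc apache-maven-3.9.6-bin.tar.gz 51 | ``` 52 | 53 | This means the risk of a supply-chain attack or deliberately 54 | malicious code is pretty low. -------------------------------------------------------------------------------- /src/main/java/bandwidth.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License.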
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Bandwidth; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class bandwidth extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Bandwidth.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("bandwidth", "measure network bandwidth"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/bucketmetadata.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.BucketMetadata; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class bucketmetadata extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | BucketMetadata.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("bucketmetadata", "retrieve bucket metadata"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/bucketstate.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.BucketState; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class bucketstate extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | BucketState.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("bucketstate","prints the AWS bucket state"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/bulkdelete.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.BulkDeleteCommand; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class bulkdelete extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | BulkDeleteCommand.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("bulkdelete", "bulk delete objects/files"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/cloudup.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.tools.cloudup.Cloudup; 21 | 22 | public class cloudup extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Cloudup.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("cloudup\t","copies to/from cloud storage"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/committerinfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.CommitterInfo; 21 | 22 | public class committerinfo extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | CommitterInfo.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("committerinfo", "Print committer information"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/constval.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.Constval; 21 | 22 | public class constval extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Constval.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("constval", "look up a constant value in a class"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/deleteobject.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.DeleteObject; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class deleteobject extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | DeleteObject.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("deleteobject", "Delete an S3 object"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/distcpdiag.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.diag.DistcpDiag; 21 | 22 | public class distcpdiag extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | DistcpDiag.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("distcpdiag", "Print distcp diagnostics"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/dux.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.ExtendedDu; 21 | 22 | public class dux extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | ExtendedDu.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("dux\t","extended du"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/fetchdt.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.FetchTokens; 21 | 22 | public class fetchdt extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | FetchTokens.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("fetchdt\t", "fetch delegation tokens"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/filestatus.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.PrintStatus; 21 | 22 | public class filestatus extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | PrintStatus.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("filestatus", "print file statuses"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/gcscreds.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.gs.GsCredDiag; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class gcscreds extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | GsCredDiag.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("gcscreds", "credential diagnostics for GCS.
Warning: logs secrets"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/help.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | 21 | /** 22 | * Help command: list the public commands. 23 | */ 24 | public class help extends Command { 25 | 26 | /** 27 | * When adding new entries here, use alphabetical order. 28 | * @param args command line args 29 | */ 30 | public static void main(String[] args) { 31 | println("Cloudstore"); 32 | println("=========="); 33 | println(""); 34 | bandwidth.help(); 35 | bulkdelete.help(); 36 | cloudup.help(); 37 | committerinfo.help(); 38 | constval.help(); 39 | distcpdiag.help(); 40 | dux.help(); 41 | fetchdt.help(); 42 | filestatus.help(); 43 | jobtokens.help(); 44 | list.help(); 45 | locatefiles.help(); 46 | localhost.help(); 47 | mkcsv.help(); 48 | pathcapability.help(); 49 | storediag.help(); 50 | tarhardened.help(); 51 | tlsinfo.help(); 52 | 53 | // extras must not refer to the optional classes. 54 | println(""); 55 | println(""); 56 | println("AWS V2 SDK-only Commands"); 57 | println("========================"); 58 | 59 | println("%nRequires an S3A connector built with the V2 AWS SDK"); 60 | println(""); 61 | bucketmetadata.help(); 62 | bucketstate.help(); 63 | deleteobject.help(); 64 | gcscreds.help(); 65 | iampolicy.help(); 66 | listobjects.help(); 67 | listversions.help(); 68 | mkbucket.help(); 69 | regions.help(); 70 | restore.help(); 71 | sessionkeys.help(); 72 | 73 | println(""); 74 | println("See https://github.com/steveloughran/cloudstore"); 75 | 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/main/java/iampolicy.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.IamPolicy; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class iampolicy extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | IamPolicy.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("iampolicy", "generate IAM policy"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/jobtokens.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.JobTokens; 21 | 22 | public class jobtokens extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | JobTokens.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("jobtokens", "fetch job tokens"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/list.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.ListFiles; 21 | 22 | public class list extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | ListFiles.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("list ", "list files"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/listmultiparts.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.ListMultiparts; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class listmultiparts extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | ListMultiparts.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("listmultiparts", "list multipart uploads to CSV"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/listobjects.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.ListObjects; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class listobjects extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | ListObjects.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("listobjects", "list S3 objects and their translated statuses"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/listversions.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.ListVersions; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class listversions extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | ListVersions.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("listversions", "list all versions of S3 objects under a path"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/localhost.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.LocalHost; 21 | 22 | public class localhost extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | LocalHost.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("localhost", "print local host details"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/locatefiles.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.LocateFiles; 21 | 22 | public class locatefiles extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | LocateFiles.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("locatefiles", "locate files"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/mkbucket.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.MkBucket; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class mkbucket extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | MkBucket.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("mkbucket", "Create an S3 bucket"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/mkcsv.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.tools.csv.MkCSV; 21 | 22 | public class mkcsv extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | MkCSV.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("mkcsv ", "generate CSV file"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/gs/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.gs; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/s3a/sdk/BucketMetadata.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.s3a.sdk; 20 | 21 | import java.util.List; 22 | 23 | import software.amazon.awssdk.services.s3.model.HeadBucketResponse; 24 | 25 | import org.apache.hadoop.conf.Configuration; 26 | import org.apache.hadoop.fs.FileSystem; 27 | import org.apache.hadoop.fs.Path; 28 | import org.apache.hadoop.fs.s3a.S3AFileSystem; 29 | import org.apache.hadoop.fs.s3a.S3AInternals; 30 | import org.apache.hadoop.fs.store.StoreEntryPoint; 31 | import org.apache.hadoop.util.ToolRunner; 32 | 33 | /** 34 | * Debug bucket settings; v2 SDK. 35 | */ 36 | public class BucketMetadata extends StoreEntryPoint { 37 | 38 | public static final String USAGE 39 | = "Usage: bucketmetadata [-debug] <path>"; 40 | 41 | public BucketMetadata() { 42 | createCommandFormat(1, 1); 43 | } 44 | 45 | @Override 46 | public int run(String[] args) throws Exception { 47 | List<String> argList = processArgs(args, 1, 1, USAGE); 48 | final Configuration conf = createPreconfiguredConfig(); 49 | 50 | // path on the CLI 51 | Path path = new Path(argList.get(0)); 52 | heading("Getting bucket info for %s", path); 53 | FileSystem fs = path.getFileSystem(conf); 54 | if (!(fs instanceof S3AFileSystem)) { 55 | println("Filesystem for path %s is not an S3A FileSystem %s", 56 | path, fs); 57 | return -1; 58 | } 59 | S3AFileSystem s3a = (S3AFileSystem) fs; 60 | final S3AInternals internals = s3a.getS3AInternals(); 61 | final HeadBucketResponse response = internals.getBucketMetadata(); 62 | 63 | println("Bucket metadata from S3"); 64 | println( 65 | "Region %s%nLocation Name %s%nLocation Type %s%n", 66 | response.bucketRegion(), 67 | response.bucketLocationName(), 68 | response.bucketLocationTypeAsString() 69 | ); 70 | 71 | return 0; 72 | } 73 | 74 | 75 | /** 76 | * Execute the command, return the result or throw an exception, 77 | * as appropriate. 78 | * @param args argument varargs. 79 | * @return return code 80 | * @throws Exception failure 81 | */ 82 | public static int exec(String... args) throws Exception { 83 | return ToolRunner.run(new BucketMetadata(), args); 84 | } 85 | 86 | /** 87 | * Main entry point. Calls {@code System.exit()} on all execution paths. 88 | * @param args argument list 89 | */ 90 | public static void main(String[] args) { 91 | try { 92 | exit(exec(args), ""); 93 | } catch (Throwable e) { 94 | exitOnThrowable(e); 95 | } 96 | } 97 | 98 | } 99 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/s3a/sdk/CsvVersionWriter.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership.
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.s3a.sdk; 20 | 21 | import java.io.IOException; 22 | import java.io.OutputStream; 23 | import java.text.DateFormat; 24 | import java.text.SimpleDateFormat; 25 | import java.time.Instant; 26 | 27 | import software.amazon.awssdk.services.s3.model.ObjectVersion; 28 | 29 | import org.apache.hadoop.fs.Path; 30 | import org.apache.hadoop.fs.tools.csv.SimpleCsvWriter; 31 | 32 | import static org.apache.hadoop.fs.s3a.sdk.S3ListingSupport.isDirMarker; 33 | 34 | /** 35 | * write to csv; pulled out to make writing to avro etc easier in future. 36 | */ 37 | final class CsvVersionWriter extends ListAndProcessVersionedObjects.NoopProcessor { 38 | 39 | private final SimpleCsvWriter csv; 40 | 41 | private final DateFormat df = new SimpleDateFormat("yyyy-MM-ddZhh:mm:ss"); 42 | 43 | private final boolean logDirs; 44 | 45 | private final boolean logDeleted; 46 | 47 | long index = 0; 48 | 49 | CsvVersionWriter( 50 | final OutputStream out, 51 | final boolean closeOutput, 52 | String separator, 53 | final boolean logDirs, 54 | final boolean logDeleted) throws 55 | IOException { 56 | this.logDirs = logDirs; 57 | this.logDeleted = logDeleted; 58 | csv = new SimpleCsvWriter(out, separator, "\n", true, closeOutput); 59 | csv.columns( 60 | "index", 61 | "key", 62 | "path", 63 | "restore", 64 | "latest", 65 | "size", 66 | "tombstone", 67 | "directory", 68 | "date", 69 | "timestamp", 70 | "version", 71 | "etag"); 72 | csv.newline(); 73 | } 74 | 75 | @Override 76 | public void close() throws IOException { 77 | csv.close(); 78 | } 79 | 80 | public boolean process(ObjectVersion summary, Path path, final boolean deleteMarker) throws IOException { 81 | final boolean dirMarker = isDirMarker(summary); 82 | if (dirMarker && !logDirs) { 83 | return false; 84 | } 85 | if (deleteMarker && !logDeleted) { 86 | return false; 87 | } 88 | csv.columnL(++index); 89 | csv.column(summary.key()); 90 | csv.column(path); 91 | csv.columnB(!deleteMarker && !dirMarker); 92 | csv.columnB(summary.isLatest()); 93 | csv.columnL(summary.size()); 94 | csv.columnB(deleteMarker); 95 | csv.columnB(dirMarker); 96 | final Instant lastModified = summary.lastModified(); 97 | csv.column(df.format(lastModified)); 98 | csv.columnL(lastModified.getEpochSecond()); 99 | final String versionId = summary.versionId(); 100 | csv.column(versionId); 101 | csv.column(summary.eTag()); 102 | csv.newline(); 103 | return true; 104 | } 105 | 106 | private long getIndex() { 107 | return index; 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/s3a/sdk/DeleteObject.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. 
See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.s3a.sdk; 20 | 21 | 22 | import java.util.List; 23 | 24 | import org.slf4j.Logger; 25 | import org.slf4j.LoggerFactory; 26 | import software.amazon.awssdk.services.s3.S3Client; 27 | import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; 28 | 29 | import org.apache.hadoop.conf.Configuration; 30 | import org.apache.hadoop.fs.Path; 31 | import org.apache.hadoop.fs.s3a.Invoker; 32 | import org.apache.hadoop.fs.s3a.S3AFileSystem; 33 | import org.apache.hadoop.fs.store.StoreEntryPoint; 34 | import org.apache.hadoop.util.ToolRunner; 35 | 36 | /** 37 | * Deletes the object at the path on the command line. 38 | */ 39 | public class DeleteObject extends StoreEntryPoint { 40 | 41 | private static final Logger LOG = LoggerFactory.getLogger(DeleteObject.class); 42 | 43 | public static final String USAGE 44 | = "Usage: deleteobject <path>"; 45 | 46 | public DeleteObject() { 47 | createCommandFormat(1, 1); 48 | } 49 | 50 | 51 | @Override 52 | public int run(String[] args) throws Exception { 53 | List<String> paths = processArgs(args, 1, 1, USAGE); 54 | final Configuration conf = createPreconfiguredConfig(); 55 | 56 | final Path source = new Path(paths.get(0)); 57 | S3AFileSystem fs = (S3AFileSystem) source.getFileSystem(conf); 58 | S3Client s3 = fs.getS3AInternals().getAmazonS3Client("DeleteObjects"); 59 | Invoker.once("delete", source.toString(), () -> 60 | s3.deleteObject( 61 | DeleteObjectRequest.builder() 62 | .bucket(fs.getBucket()) 63 | .key(fs.pathToKey(source)) 64 | .build())); 65 | return 0; 66 | } 67 | 68 | /** 69 | * Execute the command, return the result or throw an exception, 70 | * as appropriate. 71 | * @param args argument varargs. 72 | * @return return code 73 | * @throws Exception failure 74 | */ 75 | public static int exec(String... args) throws Exception { 76 | return ToolRunner.run(new DeleteObject(), args); 77 | } 78 | 79 | /** 80 | * Main entry point. Calls {@code System.exit()} on all execution paths. 81 | * @param args argument list 82 | */ 83 | public static void main(String[] args) { 84 | try { 85 | exit(exec(args), ""); 86 | } catch (Throwable e) { 87 | exitOnThrowable(e); 88 | } 89 | } 90 | 91 | } 92 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/s3a/sdk/IamPolicy.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.util.EnumSet;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.auth.RoleModel;
import org.apache.hadoop.fs.s3a.auth.delegation.AWSPolicyProvider;
import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

/**
 * Print the IAM policy rules which the target filesystem offers for
 * read, write and admin access.
 */
public class IamPolicy extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(IamPolicy.class);

  public static final String USAGE
      = "Usage: iampolicy <s3a-path>";

  public IamPolicy() {
    createCommandFormat(1, 1);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> paths = processArgs(args, 1, 1, USAGE);

    final Configuration conf = createPreconfiguredConfig();

    final Path source = new Path(paths.get(0));
    S3AFileSystem fs = (S3AFileSystem) source.getFileSystem(conf);
    EnumSet<AWSPolicyProvider.AccessLevel> access
        = EnumSet.of(
            AWSPolicyProvider.AccessLevel.READ,
            AWSPolicyProvider.AccessLevel.WRITE,
            AWSPolicyProvider.AccessLevel.ADMIN);

    List<RoleModel.Statement> rules = fs.listAWSPolicyRules(access);
    String ruleset = new RoleModel().toJson(new RoleModel.Policy(rules));
    println(ruleset);
    return 0;
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new IamPolicy(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(exec(args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/InternalAccess.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import software.amazon.awssdk.services.s3.S3Client;

import org.apache.hadoop.fs.s3a.S3AFileSystem;

import static com.google.common.base.Preconditions.checkNotNull;

/**
 * Internal accessor to S3 state.
 */
public class InternalAccess {

  private final S3AFileSystem filesystem;

  public InternalAccess(final S3AFileSystem filesystem) {
    this.filesystem = checkNotNull(filesystem);
  }

  /**
   * Returns the S3 client used by this filesystem.
   * This is for internal use within the S3A code itself.
   * @return the S3 client
   */
  public S3Client getAmazonS3Client() {
    return filesystem.getS3AInternals().getAmazonS3Client("Diagnostics");
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/MkBucket.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CreateBucketConfiguration;
import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
import software.amazon.awssdk.services.s3.model.CreateBucketResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.store.StoreDurationInfo;
import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.fs.s3a.Constants.S3A_BUCKET_PROBE;
import static org.apache.hadoop.fs.store.StoreExitCodes.E_USAGE;
import static org.apache.hadoop.fs.store.diag.S3ADiagnosticsInfo.FS_S3A_AUDIT_REJECT_OUT_OF_SPAN_OPERATIONS;

/**
 * Create an S3 bucket.
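 * <p>
 * A sketch of an invocation; the region, bucket and jar names here are
 * hypothetical examples, not fixed values:
 * <pre>
 *   hadoop jar cloudstore.jar mkbucket us-west-2 s3a://example-bucket/
 * </pre>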
 */
public class MkBucket extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(MkBucket.class);

  public static final String USAGE
      = "Usage: mkbucket <region> <s3a-path>";

  public MkBucket() {
    createCommandFormat(2, 2);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> paths = parseArgs(args);
    if (paths.size() != 2) {
      errorln(USAGE);
      return E_USAGE;
    }

    final Configuration conf = createPreconfiguredConfig();
    conf.setInt(S3A_BUCKET_PROBE, 0);
    conf.setBoolean(FS_S3A_AUDIT_REJECT_OUT_OF_SPAN_OPERATIONS, false);

    final String region = paths.get(0);
    final String bucketPath = paths.get(1);
    final Path source = new Path(bucketPath);
    S3AFileSystem fs = (S3AFileSystem) source.getFileSystem(conf);
    final S3Client client = fs.getS3AInternals().getAmazonS3Client("mkbucket");
    final String bucketName = source.toUri().getHost();
    final CreateBucketRequest request =
        CreateBucketRequest.builder()
            .bucket(bucketName)
            .createBucketConfiguration(
                CreateBucketConfiguration.builder()
                    .locationConstraint(region)
                    .build())
            .build();
    CreateBucketResponse bucket;
    try (StoreDurationInfo ignored = new StoreDurationInfo(LOG,
        "Creating bucket %s", bucketName)) {
      bucket = Invoker.once("create bucket", source.toString(), () ->
          client.createBucket(request));
    }
    println("Created bucket %s", bucket);

    return 0;
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(ToolRunner.run(new MkBucket(), args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/MultipartProcessor.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.io.IOException;
import java.util.NoSuchElementException;

import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.ListPartsRequest;
import software.amazon.awssdk.services.s3.model.ListPartsResponse;

import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.s3a.S3AFileSystem;

/**
 * Support for processing the parts of multipart uploads.
 */
public class MultipartProcessor {

  private final S3AFileSystem fs;

  private final S3Client amazonS3;

  public MultipartProcessor(final S3AFileSystem fs) {
    this.fs = fs;
    amazonS3 = fs.getS3AInternals().getAmazonS3Client("api");
  }

  public PartIterator partListing(String key, final String id) {
    return new PartIterator(key, id);
  }

  public class PartIterator implements RemoteIterator<ListPartsResponse> {

    private final String key;

    private final String id;

    private boolean firstListing = true;

    private ListPartsResponse partListing;

    private ListPartsRequest request;

    PartIterator(final String key, final String id) {
      this.key = key;
      this.id = id;
    }

    public void listFirst() {
      request = ListPartsRequest.builder()
          .bucket(fs.getBucket())
          .key(key)
          .uploadId(id)
          .build();
      partListing = amazonS3.listParts(request);
    }

    public void listNext() {
      request = request.toBuilder()
          .partNumberMarker(partListing.nextPartNumberMarker())
          .build();
      partListing = amazonS3.listParts(request);
    }

    @Override
    public boolean hasNext() throws IOException {
      return firstListing || (partListing != null && partListing.isTruncated());
    }

    @Override
    public ListPartsResponse next() throws IOException {
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      if (firstListing) {
        firstListing = false;
        listFirst();
      } else {
        // not first listing, so there's a valid, truncated part
        listNext();
      }
      return partListing;
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/RestoreObject.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.store.StoreDurationInfo;
import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.fs.store.StoreExitCodes.E_USAGE;

/**
 * Restores a versioned object by copying it to the destination path
 * in the same bucket.
 */
public class RestoreObject extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(RestoreObject.class);

  public static final String USAGE
      = "Usage: restore [-verbose] <path> <version> <dest>";

  public RestoreObject() {
    createCommandFormat(3, 3);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> paths = parseArgs(args);
    if (paths.size() != 3) {
      errorln(USAGE);
      return E_USAGE;
    }

    final Configuration conf = createPreconfiguredConfig();

    final Path source = new Path(paths.get(0));
    final String version = paths.get(1);
    final S3AFileSystem fs = (S3AFileSystem) source.getFileSystem(conf);
    final Path src = fs.makeQualified(source);
    final Path dst = fs.makeQualified(new Path(paths.get(2)));
    println("restoring %s @ %s to %s",
        src, version, dst);
    long bytes;
    try (VersionedFileCopier copier = new VersionedFileCopier(fs);
         StoreDurationInfo d = new StoreDurationInfo(getOut(), "restore")) {
      bytes = copier.copy(fs.pathToKey(src), version, fs.pathToKey(dst));
    }
    println("Restored object of size %,d bytes to %s%n", bytes, dst);

    return 0;
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new RestoreObject(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(exec(args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/SummaryProcessor.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.io.Closeable;
import java.io.IOException;

import software.amazon.awssdk.services.s3.model.DeleteMarkerEntry;
import software.amazon.awssdk.services.s3.model.ObjectVersion;

import org.apache.hadoop.fs.Path;

/**
 * Callback to process object version summaries and tombstones
 * encountered during a versioned listing.
 */
interface SummaryProcessor extends Closeable {

  boolean process(ObjectVersion summary, Path path, final boolean isDeleteMarker)
      throws IOException;

  default boolean processTombstone(Path path, DeleteMarkerEntry tombstone) throws IOException {
    return false;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/VersionedFileCopier.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk;

import java.io.Closeable;
import java.io.IOException;

import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CopyObjectRequest;
import software.amazon.awssdk.services.s3.model.CopyObjectResponse;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
import software.amazon.awssdk.services.s3.model.HeadObjectResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.api.RequestFactory;
import org.apache.hadoop.fs.s3a.audit.AuditManagerS3A;
import org.apache.hadoop.fs.s3a.impl.StoreContext;

/**
 * Support for copying versioned files to different locations in the same bucket.
 * This code uses S3A auditing and other @private APIs which came with
 * HADOOP-17511, "Add an Audit plugin point for S3A auditing/context",
 * and constructs a new TransferManager: the one in the S3A FS is inaccessible.
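 * <p>
 * A usage sketch; the keys and version ID here are hypothetical:
 * <pre>{@code
 * try (VersionedFileCopier copier = new VersionedFileCopier(fs)) {
 *   long bytes = copier.copy("data/file.csv", versionId, "restored/file.csv");
 * }
 * }</pre>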
 */
public class VersionedFileCopier implements Closeable {

  private final Configuration conf;

  private final S3AFileSystem fs;

  private final RequestFactory requestFactory;

  private final StoreContext storeContext;

  private final AuditManagerS3A auditManager;

  private final S3Client s3;

  private final Invoker invoker;

  public VersionedFileCopier(final S3AFileSystem fs) {
    this.fs = fs;
    this.s3 = fs.getS3AInternals().getAmazonS3Client("VersionedFileCopier");
    this.storeContext = fs.createStoreContext();
    this.invoker = storeContext.getInvoker();
    this.conf = storeContext.getConfiguration();
    this.requestFactory = storeContext.getRequestFactory();
    this.auditManager = fs.getAuditManager();
  }

  public RequestFactory getRequestFactory() {
    return requestFactory;
  }

  @Override
  public void close() throws IOException {
  }

  /**
   * Copy an object.
   * @param sourceKey source
   * @param version source version
   * @param destKey dest in same bucket
   * @return bytes copied
   * @throws IOException failure
   */
  long copy(String sourceKey, String version, String destKey) throws IOException {

    String action = String.format("copy %s @ %s to %s", sourceKey, version, destKey);
    HeadObjectRequest head = requestFactory.newHeadObjectRequestBuilder(sourceKey)
        .versionId(version)
        .build();
    final HeadObjectResponse srcom =
        invoker.retry("HEAD @" + version,
            sourceKey, true, () -> s3.headObject(head));

    CopyObjectRequest copyObjectRequest =
        requestFactory.newCopyObjectRequestBuilder(sourceKey, destKey, srcom)
            .sourceVersionId(version).build();

    CopyObjectResponse response = invoker.retry(action, sourceKey, true, () ->
        s3.copyObject(copyObjectRequest));
    return srcom.contentLength();
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk/package-info.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Low level SDK code.
 */
package org.apache.hadoop.fs.s3a.sdk;
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/s3a/sdk2/package-info.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.s3a.sdk2;
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/CheckStoreProperty.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

import java.io.PrintStream;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.util.ToolRunner;

/**
 * Verify that an option of the filesystem of a path has the expected value.
 */
public class CheckStoreProperty extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(
      CheckStoreProperty.class);

  protected CommandFormat commandFormat = new CommandFormat(0,
      Integer.MAX_VALUE);

  static final String USAGE = "Usage: CheckStoreProperty <path> <key> <expected-value>";

  @Override
  public final int run(String[] args) throws Exception {
    return run(args, System.out);
  }

  public int run(String[] args, PrintStream stream) throws Exception {
    setOut(stream);
    List<String> argList = processArgs(args, 3, 3, USAGE);
    // path on the CLI
    String pathString = argList.get(0);
    if (!pathString.endsWith("/")) {
      pathString = pathString + "/";
    }
    Path path = new Path(pathString);
    Configuration conf = new Configuration(true);
    FileSystem fs = path.getFileSystem(conf);
    Configuration fsConf = fs.getConf();

    String key = argList.get(1);
    String expected = argList.get(2);

    String actual = fsConf.getTrimmed(key);
    if (!expected.equals(actual)) {
      println("Expected option %s of filesystem %s to be \"%s\", but was \"%s\"",
          key, path, expected, actual);
      return -1;
    } else {
      println("Value of %s for %s is as expected: %s",
          key, path, expected);
      return 0;
    }
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new CheckStoreProperty(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(exec(args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/CommonParameters.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

import static org.apache.hadoop.fs.store.StoreEntryPoint.optusage;

/**
 * Common parameters across entry points.
 */
public final class CommonParameters {

  /** {@value}. */
  public static final String TOKENFILE = "tokenfile";

  /** {@value}. */
  public static final String XMLFILE = "xmlfile";

  /** File for system properties: {@value}. */
  public static final String SYSPROPS = "sysprop";

  /** {@value}. */
  public static final String DEFINE = "D";

  /** {@value}. */
  public static final String VERBOSE = "verbose";

  /** {@value}. */
  public static final String DEBUG = "debug";

  /** {@value}. */
  public static final String LOG_OVERRIDES = "logoverrides";

  /**
   * Standard options of all entry points.
   */
  public static final String STANDARD_OPTS =
      optusage(DEFINE, "key=value", "Define a single configuration option")
      + optusage(SYSPROPS, "file", "Property file of system properties")
      + optusage(TOKENFILE, "file", "Hadoop token file to load")
      + optusage(XMLFILE, "file", "XML config file to load")
      + optusage(VERBOSE, "verbose output")
      + optusage(DEBUG, "enable JVM logs (ALL) and override log4j levels (DEBUG) on specified packages or classes")
      + optusage(LOG_OVERRIDES, "file", "A newline separated list of package and class names");

  /**
   * File for log4j properties: {@value}.
   */
  public static final String LOGFILE = "logfile";

  /** {@value}. */
  public static final String LIMIT = "limit";

  /** {@value}. */
  public static final String THREADS = "threads";

  public static final String BFS = "bfs";

  public static final String BLOCK = "block";

  public static final String CSVFILE = "csv";

  public static final String FLUSH = "flush";

  public static final String HFLUSH = "hflush";

  public static final String IGNORE = "ignore";

  public static final String LARGEST = "largest";

  public static final String OVERWRITE = "overwrite";

  public static final String PAGE = "page";

  public static final String UPDATE = "update";

  private CommonParameters() {
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/MinMeanMax.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Simple min/mean/max statistics.
 */
public class MinMeanMax {

  final String name;
  final AtomicLong min = new AtomicLong(Long.MAX_VALUE);
  final AtomicLong max = new AtomicLong(Long.MIN_VALUE);
  final AtomicInteger samples = new AtomicInteger(0);
  final AtomicLong sum = new AtomicLong(0);

  public MinMeanMax(final String name) {
    this.name = name;
  }

  /**
   * copilot made this up for us.
   * @param value new value
   */
  public synchronized void add(final long value) {
    min.accumulateAndGet(value, Math::min);
    max.accumulateAndGet(value, Math::max);
    sum.addAndGet(value);
    samples.incrementAndGet();
  }

  public String getName() {
    return name;
  }

  public long min() {
    return min.get();
  }

  public long max() {
    return max.get();
  }

  public int samples() {
    return samples.get();
  }

  public long sum() {
    return sum.get();
  }

  public double mean() {
    final int sam = samples.get();
    return sam > 0 ? ((double) sum.get()) / sam : 0;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/PathCapabilityChecker.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.VersionInfo;

import static org.apache.hadoop.fs.store.StoreExitCodes.E_EXCEPTION_THROWN;
import static org.apache.hadoop.fs.store.StoreExitCodes.E_UNSUPPORTED_VERSION;

public class PathCapabilityChecker {

  private final Method hasPathCapability;
  private final Object source;

  public PathCapabilityChecker(Object source) {
    this.source = source;
    Method method;
    try {
      method = source.getClass().getMethod("hasPathCapability",
          Path.class, String.class);
    } catch (NoSuchMethodException e) {
      method = null;
    }
    hasPathCapability = method;
  }

  public boolean methodAvailable() {
    return hasPathCapability != null;
  }

  /**
   * Does an object have a capability?
   * Uses reflection so the jar can compile/run against
   * older hadoop releases.
   * Throws ExitException(E_UNSUPPORTED_VERSION) if the API isn't found.
   * @param path path
   * @param capability capability to probe
   * @return true iff the path has the capability
   * @throws IOException failure
   */
  public boolean hasPathCapability(Path path, String capability)
      throws IOException {
    if (!methodAvailable()) {
      throw new ExitUtil.ExitException(E_UNSUPPORTED_VERSION,
          "Hadoop version does not support PathCapabilities: "
              + VersionInfo.getVersion());
    }
    try {
      return (Boolean) hasPathCapability.invoke(source, path, capability);
    } catch (IllegalAccessException e) {
      throw new ExitUtil.ExitException(E_UNSUPPORTED_VERSION,
          "Hadoop version does not support PathCapabilities: "
              + VersionInfo.getVersion());
    } catch (InvocationTargetException e) {
      Throwable ex = e.getTargetException();
      if (ex instanceof IOException) {
        throw (IOException) ex;
      } else {
        throw new ExitUtil.ExitException(E_EXCEPTION_THROWN,
            ex.toString(), ex);
      }
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/StoreDiagConstants.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

public final class StoreDiagConstants {

  public static final String IOSTATISTICS_LOGGING_LEVEL
      = "fs.iostatistics.logging.level";

  private StoreDiagConstants() {

  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/StoreExitCodes.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

/**
 * See LauncherExitCodes; here just to build against older versions.
 */
public class StoreExitCodes {

  public static final int E_SUCCESS = 0;
  public static final int E_ERROR = -1;

  public static final int E_USAGE = 42;

  public static final int E_INVALID_ARGUMENT = -1;

  public static final int E_NOT_FOUND = 44;
  public static final int E_NOT_FOUND2 = 45;

  public static final int E_NO_ACCESS = 41;

  /**
   * Exit code when an exception was thrown from the service: {@value}.
   * <p>
   * Approximate HTTP equivalent: {@code 500 Internal Server Error}
   */
  public static final int E_EXCEPTION_THROWN = 50;

  /**
   * Unimplemented feature: {@value}.
   * <p>
   * Approximate HTTP equivalent: {@code 501: Not Implemented}
   */
  public static final int E_UNIMPLEMENTED = 51;

  /**
   * Service Unavailable; it may be available later: {@value}.
   * <p>
   * Approximate HTTP equivalent: {@code 503 Service Unavailable}
   */
  public static final int E_SERVICE_UNAVAILABLE = 53;

  /**
   * The application does not support, or refuses to support this
   * version: {@value}.
   * <p>
   * If raised, this is expected to be raised server-side and likely due
   * to client/server version incompatibilities.
   * <p>
   * Approximate HTTP equivalent: {@code 505: Version Not Supported}
   */
  public static final int E_UNSUPPORTED_VERSION = 55;

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/StoreLambda.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;

public class StoreLambda {

  /**
   * An interface for use in lambda-expressions working with
   * directory tree listings.
   */
  @FunctionalInterface
  public interface CallOnLocatedFileStatus {

    void call(LocatedFileStatus status) throws IOException;
  }

  /**
   * An interface for use in lambda-expressions working with
   * directory tree listings.
   */
  @FunctionalInterface
  public interface LocatedFileStatusMap<T> {

    T call(LocatedFileStatus status) throws IOException;
  }

  /**
   * Apply an operation to every {@link LocatedFileStatus} in a remote
   * iterator.
   * @param iterator iterator from a list
   * @param eval closure to evaluate
   * @return the number of files processed
   * @throws IOException anything in the closure, or iteration logic.
   */
  public static long applyLocatedFiles(
      RemoteIterator<LocatedFileStatus> iterator,
      CallOnLocatedFileStatus eval) throws IOException {
    long count = 0;
    while (iterator.hasNext()) {
      count++;
      eval.call(iterator.next());
    }
    return count;
  }

  /**
   * Map an operation to every {@link LocatedFileStatus} in a remote
   * iterator, returning a list of the results.
   * @param <T> return type of map
   * @param iterator iterator from a list
   * @param eval closure to evaluate
   * @return the list of mapped results.
   * @throws IOException anything in the closure, or iteration logic.
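   * <p>
   * A usage sketch, collecting the paths of all files under a directory
   * (assuming {@code fs} and {@code dir} are in scope):
   * <pre>{@code
   * List<Path> paths = mapLocatedFiles(
   *     fs.listFiles(dir, true),
   *     FileStatus::getPath);
   * }</pre>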
   */
  public static <T> List<T> mapLocatedFiles(
      RemoteIterator<LocatedFileStatus> iterator,
      LocatedFileStatusMap<T> eval) throws IOException {
    final List<T> results = new ArrayList<>();
    applyLocatedFiles(iterator,
        (s) -> results.add(eval.call(s)));
    return results;
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/abfs/package-info.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.abfs;
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/commands/Command.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.commands;

/**
 * This is the base class for the minimal CLI commands.
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public class Command {

  public static void println(String format, Object... args) {
    System.out.printf(format, args);
    System.out.println();
  }

  protected static void printCommand(String name, String function) {
    System.out.printf("%s\t--\t%s%n", name, function);
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/commands/EnvEntry.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.commands;

/**
 * Environment/property entry, with
 * methods to convert to a given format.
 */
public class EnvEntry {
  final String name;
  final String envVar;
  final String value;

  public EnvEntry(String name, String envVar, String value) {
    this.name = name;
    this.envVar = envVar;
    this.value = value;
  }

  public String xml() {
    return String.format("<%s>%n  %s%n</%s>%n", name, value, name);
  }

  public String property() {
    return String.format("%s=%s%n", name, value);
  }

  public String cliProperty() {
    return String.format("-D %s=%s ", name, value);
  }

  public String spark() {
    return String.format("spark.hadoop.%s %s %n", name, value);
  }

  public String bash() {
    return String.format("export %s=\"%s\"%n", envVar, value);
  }

  public String env() {
    return String.format("%s=\"%s\"%n", envVar, value);
  }

  public String fish() {
    return String.format("set -gx %s \"%s\";%n", envVar, value);
  }

  public boolean hasEnvVar() {
    return !envVar.isEmpty();
  }

  public String getName() {
    return name;
  }

  public String getEnvVar() {
    return envVar;
  }

  public String getValue() {
    return value;
  }
}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/commands/LocalHost.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.commands;

import java.net.InetAddress;
import java.net.URI;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.fs.store.CommonParameters.STANDARD_OPTS;
import static org.apache.hadoop.net.NetUtils.getCanonicalUri;
import static org.apache.hadoop.net.NetUtils.getLocalInetAddress;

/**
 * Print the local hostname and related address lookups.
 */
public class LocalHost extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(LocalHost.class);

  public static final String USAGE
      = "Usage: localhost\n"
      + STANDARD_OPTS;

  public LocalHost() {
    createCommandFormat(1, 999);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> paths = processArgs(args, 0, 0, USAGE);

    final InetAddress localHost = InetAddress.getLocalHost();
    println("InetAddress.getLocalHost(): %s",
        localHost);
    println("getLocalInetAddress(): %s",
        getLocalInetAddress(localHost.getHostName()));

    println("getLoopbackAddress(): %s",
        InetAddress.getLoopbackAddress().getHostName());

    println("getCanonicalUri(): %s",
        getCanonicalUri(new URI("http",
            localHost.getCanonicalHostName(), ""),
            0));
    return 0;
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new LocalHost(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(exec(args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/commands/PathCapability.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.commands;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.store.PathCapabilityChecker;
import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.fs.store.CommonParameters.LOGFILE;
import static org.apache.hadoop.fs.store.CommonParameters.STANDARD_OPTS;
import static org.apache.hadoop.fs.store.StoreExitCodes.E_ERROR;

/**
 * Look for a path capability.
 */
public class PathCapability extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(PathCapability.class);

  public static final String USAGE
      = "Usage: pathcapability [options] <capability> <path>\n"
      + STANDARD_OPTS;

  public PathCapability() {
    createCommandFormat(2, 2);
    addValueOptions(LOGFILE);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> argList = processArgs(args, 1, -1, USAGE);
    final Configuration conf = createPreconfiguredConfig();

    // capability and path on the CLI
    String capability = argList.get(0);
    String pathString = argList.get(1);
    Path path = new Path(pathString);
    println("Probing %s for capability %s", path, capability);
    FileSystem fs = path.getFileSystem(conf);
    println("Using filesystem %s", fs.getUri());
    Path absPath = path.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    if (new PathCapabilityChecker(fs).
        hasPathCapability(absPath, capability)) {
      println("Path %s has capability %s",
          absPath, capability);
      return 0;
    } else {
      println("Path %s lacks capability %s",
          absPath, capability);
      return E_ERROR;
    }
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new PathCapability(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
   * @param args argument list
   */
  public static void main(String[] args) {
    try {
      exit(exec(args), "");
    } catch (Throwable e) {
      exitOnThrowable(e);
    }
  }

}
--------------------------------------------------------------------------------
/src/main/java/org/apache/hadoop/fs/store/commands/PrintStatus.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.fs.store.commands;

import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.store.StoreDurationInfo;
import org.apache.hadoop.fs.store.StoreEntryPoint;
import org.apache.hadoop.util.ToolRunner;

import static org.apache.hadoop.fs.store.CommonParameters.STANDARD_OPTS;

/**
 * Print the status of one or more paths.
 *
 * Prints some performance numbers at the end.
 */
public class PrintStatus extends StoreEntryPoint {

  private static final Logger LOG = LoggerFactory.getLogger(PrintStatus.class);

  public static final String USAGE
      = "Usage: filestatus\n"
      + STANDARD_OPTS
      + " <path> [<path>*]";

  public PrintStatus() {
    createCommandFormat(1, 999);
  }

  @Override
  public int run(String[] args) throws Exception {
    List<String> paths = processArgs(args, 1, -1, USAGE);
    final Configuration conf = createPreconfiguredConfig();

    final Path source = new Path(paths.get(0));
    FileSystem fs = null;
    StoreDurationInfo duration = new StoreDurationInfo(LOG,
        "get path status for %s", source);
    try {
      fs = source.getFileSystem(conf);
      for (String path : paths) {
        FileStatus st = fs.getFileStatus(new Path(path));
        println("%s\t%s\t[%s]", st.getPath(), st,
            FileUtils.byteCountToDisplaySize(st.getLen()));
        if (st.isDirectory() && st.getLen() > 0) {
          LOG.warn("{} is a directory but its length is {}", path, st.getLen());
        }
        if (st.getLen() < 0) {
          LOG.warn("{} has a negative length: {}", path, st.getLen());
        }
      }
    } finally {
      duration.close();
    }
    long files = paths.size();
    if (files > 1) {
      double millisPerFile = (double) duration.value() / files;
      println("");
      println("Retrieved the status of %s files, %,.0f milliseconds per file",
          files, millisPerFile);
    }

    maybeDumpStorageStatistics(fs);
    return 0;
  }

  /**
   * Execute the command, return the result or throw an exception,
   * as appropriate.
   * @param args argument varargs.
   * @return return code
   * @throws Exception failure
   */
  public static int exec(String... args) throws Exception {
    return ToolRunner.run(new PrintStatus(), args);
  }

  /**
   * Main entry point. Calls {@code System.exit()} on all execution paths.
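   * <p>
   * A sketch of an invocation; the jar and bucket names are examples only:
   * <pre>
   *   hadoop jar cloudstore.jar filestatus s3a://example-bucket/file1 s3a://example-bucket/file2
   * </pre>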
106 |    * @param args argument list
107 |    */
108 |   public static void main(String[] args) {
109 |     try {
110 |       exit(exec(args), "");
111 |     } catch (Throwable e) {
112 |       exitOnThrowable(e);
113 |     }
114 |   }
115 | 
116 | }
117 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/commands/TarHardened.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.commands;
20 | 
21 | import java.io.File;
22 | import java.util.List;
23 | 
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 | 
27 | import org.apache.hadoop.fs.FileUtil;
28 | import org.apache.hadoop.fs.store.diag.DiagnosticsEntryPoint;
29 | import org.apache.hadoop.util.ToolRunner;
30 | 
31 | /**
32 |  * Checks to see if the tar command is hardened, by taking a command-line
33 |  * parameter and trying to untar it.
34 |  */
35 | public class TarHardened extends DiagnosticsEntryPoint {
36 | 
37 |   private static final Logger LOG = LoggerFactory.getLogger(TarHardened.class);
38 | 
39 |   public static final String USAGE
40 |       = "Usage: tarhardened [filename]";
41 | 
42 |   public TarHardened() {
43 |     createCommandFormat(0, 1);
44 |   }
45 | 
46 |   @Override
47 |   public int run(String[] args) throws Exception {
48 |     List<String> paths = processArgs(args, 1, 1, USAGE);
49 |     String filename;
50 |     if (paths.isEmpty()) {
51 |       File tar = File.createTempFile("tarhardened", ".tgz");
52 |       tar.delete();
53 |       filename = tar.getAbsolutePath() + "; true"; // embedded shell fragment: a hardened untar treats this as filename text
54 |     } else {
55 |       filename = paths.get(0);
56 |     }
57 |     File tmpdir = File.createTempFile("tarhardened-dir", "");
58 |     tmpdir.delete();
59 |     final File source = new File(filename);
60 |     println("Attempting to untar file with name \"%s\"", source);
61 |     FileUtil.unTar(source, tmpdir);
62 |     println("untar operation reported success");
63 |     println();
64 |     return 0;
65 |   }
66 | 
67 |   /**
68 |    * Execute the command, return the result or throw an exception,
69 |    * as appropriate.
70 |    * @param args argument varargs.
71 |    * @return return code
72 |    * @throws Exception failure
73 |    */
74 |   public static int exec(String... args) throws Exception {
75 |     return ToolRunner.run(new TarHardened(), args);
76 |   }
77 | 
78 |   /**
79 |    * Main entry point. Calls {@code System.exit()} on all execution paths.
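 * <p>Example invocation, a sketch only (the jar name and path are illustrative);
 * with no argument the command generates its own hostile filename:
 * <pre>
 *   hadoop jar cloudstore-1.0.jar tarhardened /tmp/sample.tar
 * </pre>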
80 | * @param args argument list 81 | */ 82 | public static void main(String[] args) { 83 | try { 84 | exit(exec(args), ""); 85 | } catch (Throwable e) { 86 | exitOnThrowable(e); 87 | } 88 | } 89 | 90 | } 91 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/commands/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.commands; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/ADLDiagnosticsInfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */
18 | 
19 | package org.apache.hadoop.fs.store.diag;
20 | 
21 | import java.io.IOException;
22 | import java.net.URI;
23 | import java.util.ArrayList;
24 | import java.util.List;
25 | 
26 | import org.apache.hadoop.conf.Configuration;
27 | 
28 | public class ADLDiagnosticsInfo extends StoreDiagnosticsInfo {
29 | 
30 |   private static final Object[][] options = {
31 | 
32 |       {"fs.adl.oauth2.access.token.provider.type", false, false},
33 |       {"fs.adl.oauth2.access.token.provider", false, false},
34 |       {"fs.adl.oauth2.client.id", true, false},
35 |       {"fs.adl.oauth2.credential", true, true},
36 |       {"fs.adl.oauth2.devicecode.clientapp.id", false, false},
37 |       {"fs.adl.oauth2.msi.port", false, false},
38 |       {"fs.adl.oauth2.refresh.token", true, true},
39 |       {"fs.adl.oauth2.refresh.url", true, false},
40 |       {"adl.feature.client.cache.readahead", false, false},
41 |       {"adl.feature.client.cache.drop.behind.writes", false, false},
42 |       {"adl.debug.override.localuserasfileowner", false, false},
43 |   };
44 | 
45 |   public static final String[] classnames = {
46 |       "org.apache.hadoop.fs.adl.AdlFileSystem",
47 |       "com.microsoft.azure.datalake.store.ADLStoreClient",
48 |   };
49 | 
50 |   public ADLDiagnosticsInfo(final URI fsURI, final Printout output) {
51 |     super(fsURI, output);
52 |   }
53 | 
54 |   @Override
55 |   public String getName() {
56 |     return "Azure Datalake connector";
57 |   }
58 | 
59 |   @Override
60 |   public String getDescription() {
61 |     return "ASF Filesystem Connector to Microsoft Azure Datalake";
62 |   }
63 | 
64 |   @Override
65 |   public String getHomepage() {
66 |     return "https://hadoop.apache.org/docs/current/hadoop-azure-datalake/index.html";
67 |   }
68 | 
69 |   @Override
70 |   public Object[][] getFilesystemOptions() {
71 |     return options;
72 |   }
73 | 
74 |   @Override
75 |   public String[] getClassnames(final Configuration conf) {
76 |     return classnames;
77 |   }
78 | 
79 |   @Override
80 |   public List<URI> listEndpointsToProbe(final Configuration conf)
81 |       throws IOException {
82 |     List<URI> uris = new ArrayList<>(2);
83 |     addUriOption(uris, conf, "fs.adl.oauth2.refresh.url", "", "");
84 |     String bucket = getFsURI().getHost();
85 |     uris.add(StoreDiag.toURI("host", String.format("https://%s", bucket)));
86 |     return uris;
87 |   }
88 | 
89 |   @Override
90 |   protected void validateConfig(final Printout printout,
91 |       final Configuration conf, final boolean writeOperations)
92 |       throws IOException {
93 |     super.validateConfig(printout, conf, writeOperations);
94 |     warnOnInvalidDomain(printout, ".azuredatalakestore.net",
95 |         "https://docs.hortonworks.com/HDPDocuments/HDP2/HDP-2.6.5/bk_cloud-data-access/content/adls-uri.html");
96 |   }
97 | }
98 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/DiagUtils.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License.
You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.diag;
20 | 
21 | import java.util.concurrent.TimeUnit;
22 | import java.util.regex.Pattern;
23 | 
24 | public class DiagUtils {
25 | 
26 |   public static Pattern ipV4pattern() {
27 |     return Pattern.compile("^(([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\.){3}([01]?\\d\\d?|2[0-4]\\d|25[0-5])$");
28 | 
29 |   }
30 | 
31 |   public static boolean isIpV4String(String input) {
32 |     return ipV4pattern().matcher(input).matches();
33 |   }
34 | 
35 |   /**
36 |    * Determine the suffix for a time unit.
37 |    * @param unit time unit
38 |    * @return string value for conf files
39 |    */
40 |   public static String suffixTimeUnit(TimeUnit unit) {
41 |     return ParsedTimeDuration.unitFor(unit).suffix();
42 |   }
43 | 
44 |   /**
45 |    * Go from a time unit to a string suffix.
46 |    * This is not the right way to map an enum to a string, but it's what conf
47 |    * does.
48 |    */
49 |   public enum ParsedTimeDuration {
50 |     NS {
51 |       public TimeUnit unit() { return TimeUnit.NANOSECONDS; }
52 |       public String suffix() { return "ns"; }
53 |     },
54 |     US {
55 |       public TimeUnit unit() { return TimeUnit.MICROSECONDS; }
56 |       public String suffix() { return "us"; }
57 |     },
58 |     MS {
59 |       public TimeUnit unit() { return TimeUnit.MILLISECONDS; }
60 |       public String suffix() { return "ms"; }
61 |     },
62 |     S {
63 |       public TimeUnit unit() { return TimeUnit.SECONDS; }
64 |       public String suffix() { return "s"; }
65 |     },
66 |     M {
67 |       public TimeUnit unit() { return TimeUnit.MINUTES; }
68 |       public String suffix() { return "m"; }
69 |     },
70 |     H {
71 |       public TimeUnit unit() { return TimeUnit.HOURS; }
72 |       public String suffix() { return "h"; }
73 |     },
74 |     D {
75 |       public TimeUnit unit() { return TimeUnit.DAYS; }
76 |       public String suffix() { return "d"; }
77 |     };
78 |     public abstract TimeUnit unit();
79 |     public abstract String suffix();
80 |     public static ParsedTimeDuration unitFor(String s) {
81 |       for (ParsedTimeDuration ptd : values()) {
82 |         // iteration order is in decl order, so SECONDS matched last
83 |         if (s.endsWith(ptd.suffix())) {
84 |           return ptd;
85 |         }
86 |       }
87 |       return null;
88 |     }
89 |     public static ParsedTimeDuration unitFor(TimeUnit unit) {
90 |       for (ParsedTimeDuration ptd : values()) {
91 |         if (ptd.unit() == unit) {
92 |           return ptd;
93 |         }
94 |       }
95 |       return null;
96 |     }
97 |   }
98 | }
99 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/HBossConstants.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License.
You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.diag;
20 | 
21 | /**
22 |  * HBoss support.
23 |  * From {@code org.apache.hadoop.hbase.oss.Constants}
24 |  */
25 | public class HBossConstants {
26 |   public static final String DATA_URI = "fs.hboss.data.uri";
27 |   public static final String SYNC_IMPL = "fs.hboss.sync.impl";
28 | 
29 |   public static final String ZK_CONN_STRING = "fs.hboss.sync.zk.connectionString";
30 |   public static final String ZK_BASE_SLEEP_MS = "fs.hboss.sync.zk.sleep.base.ms";
31 |   public static final String ZK_MAX_RETRIES = "fs.hboss.sync.zk.sleep.max.retries";
32 | 
33 | 
34 |   public static final String WAIT_INTERVAL_WARN = "fs.hboss.lock-wait.interval.warning";
35 | 
36 |   public static final String CAPABILITY_HBOSS =
37 |       "org.apache.hadoop.hbase.hboss";
38 | 
39 | }
40 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/StoreDiagException.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.diag;
20 | 
21 | import org.apache.hadoop.util.ExitUtil;
22 | 
23 | /**
24 |  * This is a RuntimeException so that it can be raised inside Java 8 lambdas.
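 * <p>An illustrative sketch (not from this codebase) of why that matters:
 * a supplier lambda may not throw checked exceptions, but can raise this one:
 * <pre>
 *   Optional.ofNullable(conf.get("fs.defaultFS"))
 *       .orElseThrow(() -> new StoreDiagException("unset option %s", "fs.defaultFS"));
 * </pre>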
25 | */ 26 | public class StoreDiagException extends ExitUtil.ExitException { 27 | 28 | public StoreDiagException(final String message, final Object...args) { 29 | this(-1, message, args); 30 | } 31 | 32 | @Override 33 | public synchronized StoreDiagException initCause(final Throwable cause) { 34 | super.initCause(cause); 35 | return this; 36 | } 37 | 38 | public StoreDiagException(final int status, final String message, final Object...args) { 39 | super(status, formatStr(message, args)); 40 | } 41 | 42 | private static String formatStr(final String message, final Object[] args) { 43 | try { 44 | return String.format(message, args); 45 | } catch (Exception e) { 46 | return message; 47 | } 48 | 49 | } 50 | 51 | 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/StoreLogExactlyOnce.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.diag; 20 | 21 | import java.util.concurrent.atomic.AtomicBoolean; 22 | 23 | import org.slf4j.Logger; 24 | 25 | /** 26 | * Log exactly once, even across threads. 27 | */ 28 | public class StoreLogExactlyOnce { 29 | 30 | private final AtomicBoolean logged = new AtomicBoolean(false); 31 | private final Logger log; 32 | 33 | public StoreLogExactlyOnce(final Logger log) { 34 | this.log = log; 35 | } 36 | 37 | public boolean warn(String format, Object...args) { 38 | if (!logged.getAndSet(true)) { 39 | log.warn(format, args); 40 | return true; 41 | } 42 | return false; 43 | } 44 | public boolean info(String format, Object...args) { 45 | if (!logged.getAndSet(true)) { 46 | log.info(format, args); 47 | return true; 48 | } 49 | return false; 50 | } 51 | 52 | public boolean error(String format, Object...args) { 53 | if (!logged.getAndSet(true)) { 54 | log.error(format, args); 55 | return true; 56 | } 57 | return false; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/TemplateDiagnosticsInfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. 
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.diag; 20 | 21 | import java.net.URI; 22 | 23 | import org.apache.hadoop.conf.Configuration; 24 | 25 | /** 26 | * This is just a template to use when creating diagnostics bindings. 27 | */ 28 | public class TemplateDiagnosticsInfo extends StoreDiagnosticsInfo { 29 | 30 | /** 31 | * Mandatory classnames. 32 | */ 33 | public static final String[] classnames = { 34 | "com.example.mandatory", 35 | }; 36 | 37 | /** 38 | * Optional classnames. 39 | */ 40 | public static final String[] optionalClassnames = { 41 | "", 42 | }; 43 | 44 | /** 45 | * List of options for filesystems. 46 | * Each entry must be a tuple of (string, password, sensitive). 47 | * "password" entries are read via Configuration.getPassword(), 48 | * so will be read from a credential file. 49 | * Sensitive strings don't have their values fully printed. 50 | */ 51 | private static final Object[][] options = { 52 | 53 | {"fs.FS.something", false, false}, 54 | {"fs.FS.secret", true, true}, 55 | }; 56 | 57 | public TemplateDiagnosticsInfo(final URI fsURI, final Printout output) { 58 | super(fsURI, output); 59 | } 60 | 61 | @Override 62 | public String getName() { 63 | return "NAME"; 64 | } 65 | 66 | @Override 67 | public String getDescription() { 68 | return "Filesystem Connector to " + getName(); 69 | } 70 | 71 | @Override 72 | public String getHomepage() { 73 | return "https://hadoop.apache.org/docs/current/index.html"; 74 | } 75 | 76 | @Override 77 | public Object[][] getFilesystemOptions() { 78 | return options; 79 | } 80 | 81 | @Override 82 | public String[] getClassnames(final Configuration conf) { 83 | return classnames; 84 | } 85 | 86 | @Override 87 | public String[] getOptionalClassnames(final Configuration conf) { 88 | return optionalClassnames; 89 | } 90 | 91 | 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/WasbDiagnosticsInfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.fs.store.diag; 20 | 21 | import java.io.IOException; 22 | import java.net.URI; 23 | 24 | import org.apache.hadoop.conf.Configuration; 25 | 26 | /** 27 | * Wasb diagnostics. 28 | * Doesn't include credential information. 29 | */ 30 | public class WasbDiagnosticsInfo extends StoreDiagnosticsInfo { 31 | 32 | private static final Object[][] options = { 33 | 34 | {"fs.azure.user.agent.prefix", false, false}, 35 | {"fs.azure.secure.mode", false, false}, 36 | {"fs.azure.local.sas.key.mode", false, false}, 37 | {"fs.azure.atomic.rename.dir", false, false}, 38 | {"fs.azure.flatlist.enable", false, false}, 39 | {"fs.azure.autothrottling.enable", false, false}, 40 | {"fs.azure.enable.kerberos.support", false, false}, 41 | {"fs.azure.enable.spnego.token.cache", false, false}, 42 | {"fs.azure.cred.service.urls", false, false}, 43 | {"fs.azure.saskeygenerator.http.retry.policy.enabled", false, false}, 44 | {"fs.azure.saskeygenerator.http.retry.policy.spec", false, false}, 45 | {"fs.azure.saskey.cacheentry.expiry.period", false, false}, 46 | {"fs.azure.authorization.remote.service.urls", false, false}, 47 | {"fs.azure.delegation.token.service.urls", false, false}, 48 | }; 49 | 50 | public static final String[] classnames = { 51 | "com.fasterxml.jackson.databind.ObjectReader", 52 | "com.google.common.base.Preconditions", 53 | "com.microsoft.azure.storage.StorageErrorCode", 54 | "org.apache.commons.logging.Log", 55 | "org.apache.http.client.methods.HttpGet", 56 | "org.eclipse.jetty.util.ajax.JSON", 57 | "org.eclipse.jetty.util.log.Log", 58 | "org.apache.hadoop.fs.azure.NativeAzureFileSystem", 59 | }; 60 | 61 | public static final String[] optionalClassnames = { 62 | "org.apache.commons.lang.StringUtils", 63 | "org.apache.commons.lang3.StringUtils" 64 | }; 65 | public WasbDiagnosticsInfo(final URI fsURI, final Printout output) { 66 | super(fsURI, output); 67 | } 68 | 69 | @Override 70 | public String getName() { 71 | return "Azure WASB connector"; 72 | } 73 | 74 | @Override 75 | public String getDescription() { 76 | return "ASF Filesystem Connector to Microsoft Azure Storage"; 77 | } 78 | 79 | @Override 80 | public String getHomepage() { 81 | return "https://hadoop.apache.org/docs/current/hadoop-azure/index.html"; 82 | } 83 | 84 | @Override 85 | public Object[][] getFilesystemOptions() { 86 | return options; 87 | } 88 | 89 | @Override 90 | public String[] getClassnames(final Configuration conf) { 91 | return classnames; 92 | } 93 | 94 | @Override 95 | public String[] getOptionalClassnames(final Configuration conf) { 96 | return optionalClassnames; 97 | } 98 | 99 | @Override 100 | protected void validateConfig(final Printout printout, 101 | final Configuration conf, final boolean writeOperations) 102 | throws IOException { 103 | super.validateConfig(printout, conf, writeOperations); 104 | warnOnInvalidDomain(printout, ".blob.core.windows.net", 105 | "https://docs.microsoft.com/en-us/azure/storage/blobs/storage-custom-domain-name"); 106 | } 107 | 108 | } 109 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/diag/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | /** 20 | * Storediag and the implementations for the different stores. 21 | */ 22 | 23 | package org.apache.hadoop.fs.store.diag; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/logging/IOStatisticsIntegration.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.logging; 20 | 21 | import org.apache.hadoop.fs.store.shim.impl.Invocation; 22 | 23 | import static org.apache.hadoop.fs.store.shim.impl.Invocation.unavailable; 24 | import static org.apache.hadoop.fs.store.shim.impl.ShimReflectionSupport.loadClass; 25 | import static org.apache.hadoop.fs.store.shim.impl.ShimReflectionSupport.loadInvocation; 26 | 27 | /** 28 | * Support for IO statistics (initially through reflection). 29 | */ 30 | public class IOStatisticsIntegration { 31 | 32 | public static final String CLASSNAME_IOSTATISTICS = "org.apache.hadoop.fs.statistics.IOStatistics"; 33 | 34 | public static final String CLASSNAME_IOSTATISTICS_LOGGING = "org.apache.hadoop.fs.statistics.IOStatisticsLogging"; 35 | public static final String CLASSNAME_IOSTATISTICS_SUPPORT = "org.apache.hadoop.fs.statistics.IOStatisticsSupport"; 36 | 37 | private final Class ioStatisticsClass; 38 | 39 | private final Class ioStatisticsLogging; 40 | private final Class ioStatisticsSupport; 41 | 42 | private final Invocation _ioStatisticsToPrettyString; 43 | 44 | private final Invocation _retrieveIOStatistics; 45 | 46 | public IOStatisticsIntegration() { 47 | // try to load the class 48 | ioStatisticsClass = loadClass(CLASSNAME_IOSTATISTICS); 49 | if (ioStatisticsClass == null) { 50 | // if that class is missing, so is the rest. 
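      // leave the remaining classes null and bind the invocations to
      // "unavailable" placeholders: available() will then return false and
      // pretty-printing degrades to an empty string.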
51 | ioStatisticsSupport = null; 52 | ioStatisticsLogging = null; 53 | _ioStatisticsToPrettyString = unavailable("ioStatisticsToPrettyString"); 54 | _retrieveIOStatistics = unavailable("retrieveIOStatistics"); 55 | } else { 56 | ioStatisticsSupport = loadClass(CLASSNAME_IOSTATISTICS_SUPPORT); 57 | _retrieveIOStatistics = loadInvocation(ioStatisticsSupport, 58 | ioStatisticsClass, "retrieveIOStatistics", Object.class); 59 | 60 | ioStatisticsLogging = loadClass(CLASSNAME_IOSTATISTICS_LOGGING); 61 | 62 | _ioStatisticsToPrettyString = loadInvocation(ioStatisticsLogging, 63 | String.class, "ioStatisticsToPrettyString", ioStatisticsClass); 64 | } 65 | } 66 | 67 | public boolean available() { 68 | return ioStatisticsClass != null; 69 | } 70 | 71 | public String ioStatisticsToPrettyString(Object source) { 72 | if (!available()) { 73 | return ""; 74 | } 75 | return _ioStatisticsToPrettyString.invokeUnchecked(null, 76 | _retrieveIOStatistics.invokeUnchecked(null, source)); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/logging/Log4JController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.logging; 20 | 21 | import org.apache.log4j.Level; 22 | import org.apache.log4j.Logger; 23 | 24 | /** 25 | * Something to control logging in log4j. 26 | */ 27 | public class Log4JController implements LogControl { 28 | 29 | @Override 30 | public void setLogLevel(final String log, final LogLevel level) { 31 | Logger logger = Logger.getLogger(log); 32 | logger.setLevel(Level.toLevel(level.name())); 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/logging/LogControl.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.logging;
20 | 
21 | /**
22 |  * Interface to assist reflection-based control of logger back ends.
23 |  * An instance of LogControl is able to control the log levels of
24 |  * loggers for log libraries such as Log4j, yet can be used in
25 |  * code designed to support multiple back end loggers behind
26 |  * SLF4J.
27 |  */
28 | public interface LogControl {
29 | 
30 |   /**
31 |    * Enumeration of log levels.
32 |    * The list is in descending order.
33 |    */
34 |   enum LogLevel {
35 |     ALL("ALL"),
36 |     FATAL("FATAL"),
37 |     ERROR("ERROR"),
38 |     WARN("WARN"),
39 |     INFO("INFO"),
40 |     DEBUG("DEBUG"),
41 |     TRACE("TRACE"),
42 |     OFF("OFF");
43 | 
44 |     /**
45 |      * Level name.
46 |      */
47 |     public final String key;
48 | 
49 |     LogLevel(final String key) {
50 |       this.key = key;
51 |     }
52 | 
53 |   }
54 | 
55 |   /**
56 |    * Sets a log level for a class/package.
57 |    * @param log log to set
58 |    * @param level level to set
59 |    */
60 |   void setLogLevel(String log, LogLevel level);
61 | 
62 | 
63 | }
64 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/logging/LogControllerFactory.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.logging;
20 | 
21 | import java.util.Optional;
22 | 
23 | import org.slf4j.Logger;
24 | import org.slf4j.LoggerFactory;
25 | 
26 | import org.apache.hadoop.fs.store.diag.StoreLogExactlyOnce;
27 | 
28 | /**
29 |  * Factory for creating controllers.
30 |  * It currently only supports Log4J as a back end.
31 |  */
32 | public final class LogControllerFactory {
33 |   private static final Logger LOG = LoggerFactory.getLogger(LogControllerFactory.class);
34 |   private static final StoreLogExactlyOnce LOG_ONCE = new StoreLogExactlyOnce(LOG);
35 | 
36 |   /**
37 |    * Class name of log controller implementation to be loaded
38 |    * through reflection.
39 |    */
40 |   public static final String LOG4J = "org.apache.hadoop.fs.store.logging.Log4JController";
41 | 
42 |   private LogControllerFactory() {
43 |   }
44 | 
45 |   /**
46 |    * Create a controller.
47 |    * @return the instantiated controller, or empty if the class can't be instantiated.
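 * <p>Usage sketch (the logger name is illustrative):
 * <pre>
 *   LogControllerFactory.createController(LogControllerFactory.LOG4J)
 *       .ifPresent(c -> c.setLogLevel("org.apache.hadoop.fs.s3a",
 *           LogControl.LogLevel.DEBUG));
 * </pre>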
48 |    */
49 |   public static Optional<LogControl> createController(String classname) {
50 |     try {
51 |       Class<?> clazz = Class.forName(classname);
52 |       return Optional.of((LogControl) clazz.newInstance());
53 |     } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
54 |         | ClassCastException e) {
55 |       LOG_ONCE.warn("Failed to create controller {}: {}", classname, e, e);
56 |       return Optional.empty();
57 |     }
58 |   }
59 | 
60 | 
61 | }
62 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/logging/package-info.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.logging;
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/s3a/package-info.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | /**
20 |  * Various S3A-specific code which compiles across all supported Hadoop
21 |  * versions.
22 |  */
23 | 
24 | package org.apache.hadoop.fs.store.s3a;
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/APIShim.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License.
You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.shim;
20 | 
21 | /**
22 |  * An API shim of type {@code TYPE}.
23 |  *
24 |  * @param <TYPE> the type which the shim wraps.
25 |  */
26 | public interface APIShim<TYPE> extends IsImplemented {
27 |   /**
28 |    * Get the instance.
29 |    *
30 |    * @return instance being shimmed.
31 |    */
32 |   TYPE getInstance();
33 | 
34 | }
35 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/IsImplemented.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.shim;
20 | 
21 | /**
22 |  * Interface to probe for feature being directly available in
23 |  * the Hadoop runtime, through the shim API.
24 |  * If a capability is not implemented it may be because

25 |  * <ol>
26 |  *   <li>The capability is unknown in this release of the shim library.</li>
27 |  *   <li>The capability is unknown in the shim class.</li>
28 |  *   <li>The capability is known in the shim class but the hadoop runtime
29 |  *   lacks the API.</li>
30 |  *   <li>The capability is known, the API exists but is not supported by the object
31 |  *   instance to which the shim class is bound.</li>
32 |  *   <li>The capability is known but disabled/not working.</li>
33 |  * </ol>
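 * <p>A probing sketch (the capability name is hypothetical):
 * <pre>
 *   if (shim.isImplemented("openfile.withstatus")) {
 *     // call the real API rather than the shim's emulation
 *   }
 * </pre>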
34 |  * The API may be dynamic, where a fallback happens after the failure of a direct
35 |  * invocation.
36 |  */
37 | public interface IsImplemented {
38 | 
39 |   /**
40 |    * Is a feature directly available by the wrapped class, rather
41 |    * than being emulated by the shim library.
42 |    *
43 |    * @param capability capability/feature to probe for
44 |    *
45 |    * @return true if the wrapped class supports it directly
46 |    */
47 |   default boolean isImplemented(String capability) {
48 |     return false;
49 |   }
50 | }
51 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/impl/AbstractAPIShim.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License. You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.shim.impl;
20 | 
21 | import org.apache.hadoop.fs.store.shim.APIShim;
22 | 
23 | import static java.util.Objects.requireNonNull;
24 | 
25 | /**
26 |  * Base class for any API shim.
27 |  * Subclasses provide the class-specific operations; one of them must then
28 |  * be created for every instance of the class to work with.
29 |  * @param <T> class being shimmed to.
30 |  */
31 | public abstract class AbstractAPIShim<T> implements APIShim<T> {
32 | 
33 |   /**
34 |    * Class being shimmed.
35 |    */
36 |   private final Class<T> clazz;
37 | 
38 |   /**
39 |    * Instance being shimmed.
40 |    */
41 |   private final T instance;
42 | 
43 |   /**
44 |    * Constructor.
45 |    * @param clazz Class being shimmed.
46 |    * @param instance Instance being shimmed.
47 |    */
48 |   public AbstractAPIShim(final Class<T> clazz, final T instance) {
49 |     this.clazz = requireNonNull(clazz);
50 |     this.instance = requireNonNull(instance);
51 |   }
52 | 
53 |   /**
54 |    * Get the class.
55 |    * @return class being shimmed.
56 |    */
57 |   public Class<T> getClazz() {
58 |     return clazz;
59 |   }
60 | 
61 |   @Override
62 |   public T getInstance() {
63 |     return instance;
64 |   }
65 | 
66 |   @Override
67 |   public String toString() {
68 |     return "AbstractAPIShim{" +
69 |         "instance=" + instance +
70 |         '}';
71 |   }
72 | }
73 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/impl/Invocation.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership. The ASF licenses this file
6 |  * to you under the Apache License, Version 2.0 (the
7 |  * "License"); you may not use this file except in compliance
8 |  * with the License.
You may obtain a copy of the License at
9 |  *
10 |  * http://www.apache.org/licenses/LICENSE-2.0
11 |  *
12 |  * Unless required by applicable law or agreed to in writing, software
13 |  * distributed under the License is distributed on an "AS IS" BASIS,
14 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 |  * See the License for the specific language governing permissions and
16 |  * limitations under the License.
17 |  */
18 | 
19 | package org.apache.hadoop.fs.store.shim.impl;
20 | 
21 | import java.io.IOException;
22 | import java.io.UncheckedIOException;
23 | import java.lang.reflect.Method;
24 | 
25 | /**
26 |  * A method which can be invoked.
27 |  * @param <T> return type.
28 |  */
29 | public final class Invocation<T> {
30 | 
31 |   /**
32 |    * Method name for error messages.
33 |    */
34 |   private final String name;
35 | 
36 |   /**
37 |    * Method to invoke.
38 |    */
39 |   private final Method method;
40 | 
41 |   /**
42 |    * Create.
43 |    * @param name invocation name for error messages.
44 |    * @param method method to invoke.
45 |    */
46 |   public Invocation(final String name, final Method method) {
47 |     this.name = name;
48 |     this.method = method;
49 |   }
50 | 
51 |   /**
52 |    * Is the method available.
53 |    * @return true if the invocation is available.
54 |    */
55 |   public boolean available() {
56 |     return method != null;
57 |   }
58 | 
59 |   /**
60 |    * Invoke the method with exception unwrap/uprate.
61 |    * If {@link #method} is null, raise UnsupportedOperationException
62 |    * @param instance instance to invoke
63 |    * @param parameters parameters
64 |    * @return the result
65 |    * @throws UnsupportedOperationException if the method is null
66 |    * @throws RuntimeException for all RTEs raised by invoked methods except UncheckedIOEs
67 |    * @throws IOException when converting/unwrapping thrown exceptions
68 |    */
69 |   public T invoke(
70 |       final Object instance,
71 |       final Object... parameters) throws IOException {
72 |     return (T) ShimReflectionSupport.invokeOperation(name, instance, method, parameters);
73 |   }
74 | 
75 |   /**
76 |    * Invoke the method with exception unwrap/uprate.
77 |    * If {@link #method} is null, raise UnsupportedOperationException
78 |    * @param instance instance to invoke
79 |    * @param parameters parameters
80 |    * @return the result
81 |    * @throws UnsupportedOperationException if the method is null
82 |    * @throws RuntimeException for all RTEs raised by invoked methods except UncheckedIOEs
83 |    * @throws UncheckedIOException wrapped IOE
84 |    */
85 |   public T invokeUnchecked(
86 |       final Object instance,
87 |       final Object... parameters) {
88 |     return (T) ShimReflectionSupport.invokeUnchecked(name, instance, method, parameters);
89 |   }
90 | 
91 |   /**
92 |    * Generate an invocation which is always unavailable.
93 |    * @param name name for the exception text.
94 |    * @return an invocation which always raises
95 |    */
96 |   public static <T> Invocation<T> unavailable(String name) {
97 |     return new Invocation<>(name, null);
98 |   }
99 | }
100 | 
-------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/impl/package-info.java: --------------------------------------------------------------------------------
1 | /*
2 |  * Licensed to the Apache Software Foundation (ASF) under one
3 |  * or more contributor license agreements. See the NOTICE file
4 |  * distributed with this work for additional information
5 |  * regarding copyright ownership.
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.shim.impl; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/shim/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.shim; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/test/AbstractS3AStoreTest.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | package org.apache.hadoop.fs.store.test; 20 | 21 | import java.io.File; 22 | import java.net.URI; 23 | 24 | import org.junit.Before; 25 | import org.junit.BeforeClass; 26 | import org.junit.Rule; 27 | import org.junit.rules.TestName; 28 | import org.junit.rules.Timeout; 29 | import org.slf4j.Logger; 30 | import org.slf4j.LoggerFactory; 31 | 32 | import org.apache.hadoop.conf.Configuration; 33 | import org.apache.hadoop.fs.FileSystem; 34 | import org.apache.hadoop.fs.Path; 35 | import org.apache.hadoop.fs.contract.AbstractBondedFSContract; 36 | import org.apache.hadoop.fs.contract.AbstractFSContract; 37 | import org.apache.hadoop.fs.contract.AbstractFSContractTestBase; 38 | import org.apache.hadoop.fs.s3a.S3AFileSystem; 39 | 40 | 41 | /** 42 | * As the S3A test base isn't available, do enough to make it look 43 | * like it is, to ease later merge. 44 | */ 45 | public class AbstractS3AStoreTest extends AbstractFSContractTestBase { 46 | protected static final Logger LOG = 47 | LoggerFactory.getLogger(AbstractS3AStoreTest.class); 48 | private Path root; 49 | private Path testPath; 50 | 51 | @Override 52 | protected AbstractFSContract createContract(Configuration conf) { 53 | return new S3AStoreContract(conf); 54 | } 55 | 56 | private File methodDir; 57 | private File sourceDir; 58 | private S3AFileSystem fileSystem; 59 | 60 | @Rule 61 | public TestName methodName = new TestName(); 62 | 63 | /** 64 | * Set the timeout for every test. 65 | */ 66 | @Rule 67 | public Timeout testTimeout = new Timeout(600 * 1000); 68 | 69 | @BeforeClass 70 | public static void classSetup() throws Exception { 71 | Thread.currentThread().setName("JUnit"); 72 | } 73 | 74 | public Configuration createConfiguration() { 75 | return new Configuration(); 76 | } 77 | 78 | @Override 79 | public S3AFileSystem getFileSystem() { 80 | return fileSystem; 81 | } 82 | 83 | @Before 84 | public void setup() throws Exception { 85 | super.setup(); 86 | String key = String.format(AbstractBondedFSContract.FSNAME_OPTION, "s3a"); 87 | Configuration conf = createConfiguration(); 88 | String fsVal = conf.getTrimmed(key); 89 | assertFalse("No FS set in " + key, fsVal == null || fsVal.isEmpty()); 90 | URI fsURI = new URI(fsVal); 91 | assertEquals("Not an S3A Filesystem: " + fsURI, 92 | "s3a", fsURI.getScheme()); 93 | fileSystem = (S3AFileSystem) FileSystem.get(fsURI, conf); 94 | root = new Path(getFileSystem().getUri()); 95 | } 96 | 97 | 98 | public Path methodPath() { 99 | return new Path(testPath, methodName.getMethodName()); 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/test/S3AStoreContract.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.test; 20 | 21 | import org.apache.hadoop.conf.Configuration; 22 | import org.apache.hadoop.fs.contract.AbstractBondedFSContract; 23 | 24 | /** 25 | * The contract of S3A: only enabled if the test bucket is provided. 26 | */ 27 | public class S3AStoreContract extends AbstractBondedFSContract { 28 | 29 | public static final String CONTRACT_XML = "cloudstore/s3a.xml"; 30 | 31 | public S3AStoreContract(Configuration conf) { 32 | super(conf); 33 | //insert the base features 34 | addConfResource(CONTRACT_XML); 35 | } 36 | 37 | @Override 38 | public String getScheme() { 39 | return "s3a"; 40 | } 41 | 42 | 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/store/test/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.test; -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/tools/csv/CsvWriterWithCRC.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.tools.csv; 20 | 21 | import java.io.IOException; 22 | import java.io.OutputStream; 23 | import java.io.PrintWriter; 24 | import java.io.Writer; 25 | import java.nio.charset.StandardCharsets; 26 | import java.util.zip.CRC32; 27 | 28 | /** 29 | * This is a small utility class to write out rows to a CSV/TSV file. 30 | * It does not do any escaping of written text, so don't write entries 31 | * containing separators. 32 | * Quoting must be done external to this class. 
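 * <p>Usage sketch (separator, fields and output stream are illustrative):
 * <pre>
 *   CsvWriterWithCRC writer = new CsvWriterWithCRC(System.out, "\t", "\n", false);
 *   writer.write("path");
 *   writer.write("length");
 *   writer.newline();
 *   long rowChecksum = writer.getRowCrc();
 * </pre>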
33 | */ 34 | public final class CsvWriterWithCRC extends SimpleCsvWriter { 35 | 36 | private final CRC32 rowCrc = new CRC32(); 37 | 38 | /** 39 | * Instantiate. 40 | * @param out output writer. 41 | * @param separator field separator. 42 | * @param eol end of line sequence 43 | * @param quote quote columns? 44 | */ 45 | public CsvWriterWithCRC( 46 | final Writer out, 47 | final String separator, 48 | final String eol, 49 | final boolean quote) { 50 | super(out, separator, eol, quote, true); 51 | } 52 | 53 | /** 54 | * Instantiate. 55 | * @param out output stream. 56 | * @param separator field separator. 57 | * @param eol end of line sequence 58 | * @param quote quote columns? 59 | */ 60 | public CsvWriterWithCRC( 61 | final OutputStream out, 62 | final String separator, 63 | final String eol, 64 | final boolean quote) { 65 | this(new PrintWriter(out), separator, eol, quote); 66 | } 67 | 68 | @Override 69 | public void write(String val) throws IOException { 70 | super.write(val); 71 | rowCrc.update(val.getBytes(StandardCharsets.UTF_8)); 72 | } 73 | 74 | 75 | /** 76 | * Write a newline and add the EOL sequence to the row CRC. 77 | * @return this instance 78 | * @throws IOException IO failure. 79 | */ 80 | @Override 81 | public SimpleCsvWriter newline() throws IOException { 82 | super.newline(); 83 | rowCrc.update(getEol().getBytes(StandardCharsets.UTF_8)); 84 | return this; 85 | } 86 | 87 | /** 88 | * Get the row CRC. 89 | * @return the row crc 90 | */ 91 | public long getRowCrc() { 92 | return rowCrc.getValue(); 93 | } 94 | 95 | public void resetRowCrc() { 96 | rowCrc.reset(); 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/main/java/org/apache/hadoop/fs/tools/csv/package-info.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | /* CSV support. */ 20 | package org.apache.hadoop.fs.tools.csv; -------------------------------------------------------------------------------- /src/main/java/pathcapability.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.PathCapability; 21 | 22 | public class pathcapability extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | PathCapability.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("pathcapability", "probe for path capabilities"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/regions.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.Regions; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class regions extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Regions.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("regions", "emulate region lookup of AWS SDK"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/regions2.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.Regions; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class regions2 extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Regions.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("regions2", "emulate region lookup of AWS SDK"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/restore.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.RestoreObject; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class restore extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | RestoreObject.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("restore", "Restore a versioned S3 object"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/safeprefetch.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.abfs.SafePrefetch; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class safeprefetch extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | SafePrefetch.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("safeprefetch", "abfs prefetch safety check"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/sessionkeys.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.SessionKeys; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class sessionkeys extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | SessionKeys.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("sessionkeys", "generate session keys from the bucket's AWS credentials"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/storediag.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License.
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.diag.StoreDiag; 21 | 22 | public class storediag extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | StoreDiag.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("storediag", "store diagnostics"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/tarhardened.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.TarHardened; 21 | 22 | public class tarhardened extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | TarHardened.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("tarhardened", "Validate tar hardening"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/tlsinfo.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 
17 | */ 18 | 19 | import org.apache.hadoop.fs.store.commands.Command; 20 | import org.apache.hadoop.fs.store.commands.TLSInfo; 21 | 22 | public class tlsinfo extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | TLSInfo.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("tlsinfo\t", "Print TLS information"); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/java/undelete.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | import org.apache.hadoop.fs.s3a.sdk.Undelete; 20 | import org.apache.hadoop.fs.store.commands.Command; 21 | 22 | public class undelete extends Command { 23 | 24 | public static void main(String[] args) throws Exception { 25 | Undelete.main(args); 26 | } 27 | 28 | public static void help() { 29 | printCommand("undelete", "undelete s3 objects by removing tombstones"); 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/MANIFEST.MF: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /src/main/resources/cloudstore/diagnostics.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0"?> 2 | 3 | 20 | 21 | <configuration> 22 | <property> 23 | <name>fs.s3a.aws.credentials.provider</name> 24 | <value> 25 | org.apache.hadoop.fs.store.s3a.DiagnosticsAWSCredentialsProvider, 26 | org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider, 27 | org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider, 28 | </value> 29 | </property> 30 | </configuration> 31 | -------------------------------------------------------------------------------- /src/main/resources/cloudstore/required.txt: -------------------------------------------------------------------------------- 1 | org.apache.hadoop.fs.store.s3a.DiagnosticsAWSCredentialsProvider -------------------------------------------------------------------------------- /src/main/resources/cloudstore/s3a.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0"?> 18 | 19 | <configuration> 20 | 24 | 25 | <property> 26 | <name>fs.contract.test.root-tests-enabled</name> 27 | <value>true</value> 28 | </property> 29 | 30 | <property> 31 | <name>fs.contract.test.random-seek-count</name> 32 | <value>10</value> 33 | </property> 34 | 35 | <property> 36 | <name>fs.contract.is-blobstore</name> 37 | <value>true</value> 38 | </property> 39 | 40 | <property> 41 | <name>fs.contract.create-visibility-delayed</name> 42 | <value>true</value> 43 | </property> 44 | 45 | <property> 46 | <name>fs.contract.is-case-sensitive</name> 47 | <value>true</value> 48 | </property> 49 | 50 | <property> 51 | <name>fs.contract.rename-returns-false-if-source-missing</name> 52 | <value>true</value> 53 | </property> 54 | 55 | <property> 56 | <name>fs.contract.rename-remove-dest-if-empty-dir</name> 57 | <value>true</value> 58 | </property> 59 | 60 | <property> 61 |
<name>fs.contract.supports-append</name> 62 | <value>false</value> 63 | </property> 64 | 65 | <property> 66 | <name>fs.contract.supports-atomic-directory-delete</name> 67 | <value>false</value> 68 | </property> 69 | 70 | <property> 71 | <name>fs.contract.supports-atomic-rename</name> 72 | <value>false</value> 73 | </property> 74 | 75 | <property> 76 | <name>fs.contract.supports-block-locality</name> 77 | <value>false</value> 78 | </property> 79 | 80 | <property> 81 | <name>fs.contract.supports-concat</name> 82 | <value>false</value> 83 | </property> 84 | 85 | <property> 86 | <name>fs.contract.supports-getfilestatus</name> 87 | <value>true</value> 88 | </property> 89 | 90 | <property> 91 | <name>fs.contract.supports-seek</name> 92 | <value>true</value> 93 | </property> 94 | 95 | <property> 96 | <name>fs.contract.supports-seek-on-closed-file</name> 97 | <value>true</value> 98 | </property> 99 | 100 | <property> 101 | <name>fs.contract.rejects-seek-past-eof</name> 102 | <value>true</value> 103 | </property> 104 | 105 | <property> 106 | <name>fs.contract.supports-strict-exceptions</name> 107 | <value>true</value> 108 | </property> 109 | 110 | <property> 111 | <name>fs.contract.supports-unix-permissions</name> 112 | <value>false</value> 113 | </property> 114 | 115 | <property> 116 | <name>fs.contract.rename-overwrites-dest</name> 117 | <value>false</value> 118 | </property> 119 | 120 | </configuration> 121 | -------------------------------------------------------------------------------- /src/main/site/bucketmetadata.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `bucketmetadata` 16 | 17 | Retrieves metadata from an S3 Bucket (v2 SDK only) by probing the store, provided 18 | the caller has the permission to issue this request; sometimes it is disabled. 19 | 20 | For S3 Express buckets, this includes the Availability Zone in which the bucket is deployed, as well as its type (which will be `AvailabilityZone`). 21 | 22 | For S3 standard buckets, the location type and name will be `null`. 23 | 24 | Although it is possible to identify S3 Express buckets by their name, this is not the case when referenced through AWS Access Points; this probe against the store will report the bucket type in either case. 25 | 26 | ## Example: S3 Standard 27 | 28 | ``` 29 | bin/hadoop jar $CLOUDSTORE bucketmetadata s3a://example-london/ 30 | 31 | Getting bucket info for s3a://example-london/ 32 | ============================================= 33 | 34 | Bucket metadata from S3 35 | Region eu-west-2 36 | Location Name null 37 | Location Type null 38 | ``` 39 | 40 | The bucket region is eu-west-2; it has no location type or name. 41 | 42 | ## Example: Access point 43 | 44 | Here an access point is used to reference the same bucket, using the per-bucket option 45 | `fs.s3a.bucket.example-ap.accesspoint.arn` of a "virtual" bucket name `example-ap` 46 | to remap the request to the Access Point. 47 | 48 | ```xml 49 | <property> 50 | <name>fs.s3a.bucket.example-ap.accesspoint.arn</name> 51 | <value>arn:aws:s3:eu-west-2:152813711128:accesspoint/ap-example-london</value> 52 | <description>AccessPoint bound to example-ap which relays to example-london</description> 53 | </property> 54 | ``` 55 | 56 | The response indicates that this is mapped to an S3 Standard bucket. 57 | 58 | ``` 59 | bin/hadoop jar cloudstore-1.0.jar bucketmetadata s3a://example-ap 60 | 61 | Getting bucket info for s3a://example-ap 62 | ======================================== 63 | 64 | 2024-09-25 11:24:32,908 [main] INFO s3a.S3AFileSystem (S3AFileSystem.java:initialize(578)) - Using AccessPoint ARN "arn:aws:s3:eu-west-2:152813711128:accesspoint/ap-example-london" for bucket example-ap 65 | Bucket metadata from S3 66 | Region eu-west-2 67 | Location Name null 68 | Location Type null 69 | ``` 70 | 71 | 72 | ## Example: S3 Express 73 | 74 | When probing an S3 Express bucket, the location type and name is returned.
75 | 76 | ``` 77 | hadoop jar cloudstore-1.0.jar bucketmetadata s3a://example--usw2-az1--x-s3 78 | 79 | Getting bucket info for s3a://example--usw2-az1--x-s3 80 | ===================================================== 81 | 82 | Bucket metadata from S3 83 | Region us-west-2 84 | Location Name usw2-az1 85 | Location Type AvailabilityZone 86 | ``` 87 | 88 | ## Third Party stores 89 | 90 | The result of the probe against third party stores is undefined, and will vary with the store. 91 | 92 | Here is an example response from a probe of a Dell ECS store: 93 | 94 | ``` 95 | bin/hadoop jar cloudstore-1.0.jar bucketmetadata s3a://ecsbucket/ 96 | 97 | Getting bucket info for s3a://ecsbucket/ 98 | ========================================= 99 | 100 | Bucket metadata from S3 101 | Region null 102 | Location Name null 103 | Location Type null 104 | 105 | ``` -------------------------------------------------------------------------------- /src/main/site/constval.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `constval` 16 | 17 | Loads a class, resolves a constant/static final field and prints its value. 18 | 19 | 20 | ```bash 21 | 22 | hadoop jar cloudstore-1.0.jar constval org.apache.hadoop.fs.s3a.Constants REQUEST_TIMEOUT 23 | Value of org.apache.hadoop.fs.s3a.Constants.REQUEST_TIMEOUT = "fs.s3a.connection.request.timeout" 24 | 25 | hadoop jar cloudstore-1.0.jar constval org.apache.hadoop.fs.s3a.Constants DEFAULT_REQUEST_TIMEOUT_DURATION 26 | Value of org.apache.hadoop.fs.s3a.Constants.DEFAULT_REQUEST_TIMEOUT_DURATION = "PT0S" 27 | 28 | hadoop jar cloudstore-1.0.jar constval org.apache.hadoop.fs.s3a.Constants DEFAULT_REQUEST_TIMEOUT 29 | Value of org.apache.hadoop.fs.s3a.Constants.DEFAULT_REQUEST_TIMEOUT = "0" 30 | 31 | ``` 32 | -------------------------------------------------------------------------------- /src/main/site/diagnosticsawscredentialsprovider.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # DiagnosticsAWSCredentialsProvider 16 | 17 | A new credential provider which prints obfuscated values and MD5 checksums of the AWS secrets. 18 | 19 | This leaks some information and so the logs must be considered as sensitive as the output 20 | of storediag commands. 21 | 22 | It does not attempt to do any authentication; it simply prints the values used by the temporary/simple 23 | credential providers. 24 | 25 | ## Usage 26 | 27 | 1. Get into the same classloader as the s3a FS, which means into `share/hadoop/common/lib` 28 | 2. Add to the list of credential providers 29 | 30 | ```xml 31 | <property> 32 | <name>fs.s3a.aws.credentials.provider</name> 33 | <value> 34 | org.apache.hadoop.fs.store.s3a.DiagnosticsAWSCredentialsProvider, 35 | org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider, 36 | org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider, 37 | </value> 38 | </property> 39 | ``` 40 | 41 | *Notes* 42 | 43 | * If using S3A Delegation Tokens, the delegation token binding takes over 44 | authenticating with S3; the values in fs.s3a.aws.credentials.provider _may_ not be read. 45 | * It's not enough to set this option and invoke via cloudstore commands; the class isn't found. 46 | This may be related to the change for HADOOP-17372, but that was forced by odd things happening 47 | if a HiveConfig or similar was passed in.
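
The log lines shown below mask each secret and print an MD5 checksum, so values can be compared across hosts without being revealed. A minimal sketch of that masking/digest idea (illustrative only; `SecretDiag` is a hypothetical class, not the provider's actual code):

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

/** Illustrative sketch: mask the middle of a secret and hash the whole of it. */
public final class SecretDiag {

  private SecretDiag() {
  }

  /** Keep the first two and last four characters; mask the rest. */
  static String mask(String secret) {
    int len = secret.length();
    if (len <= 6) {
      return "******";  // too short to reveal anything
    }
    StringBuilder sb = new StringBuilder(len);
    sb.append(secret, 0, 2);
    for (int i = 2; i < len - 4; i++) {
      sb.append('*');
    }
    sb.append(secret.substring(len - 4));
    return sb.toString();
  }

  /** Hex MD5 digest of the secret, for safe comparison of values. */
  static String md5(String secret) throws Exception {
    byte[] digest = MessageDigest.getInstance("MD5")
        .digest(secret.getBytes(StandardCharsets.UTF_8));
    StringBuilder sb = new StringBuilder(digest.length * 2);
    for (byte b : digest) {
      sb.append(String.format("%02X", b));
    }
    return sb.toString();
  }

  public static void main(String[] args) throws Exception {
    String secret = "AKEXAMPLEEXAMPLE66YB";  // a made-up 20-character key
    System.out.printf("\"%s\" [%d] %s%n",
        mask(secret), secret.length(), md5(secret));
  }
}
```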
48 | 49 | ## Output from an operation 50 | 51 | ``` 52 | 2022-10-03 16:41:15,135 [main] INFO s3a.DiagnosticsAWSCredentialsProvider (DiagnosticsAWSCredentialsProvider.java:printSecretOption(135)) 53 | - Option fs.s3a.access.key = "AK**************66YB" [20] D51E40E203A4137FFE7CAB1BA000000 from [core-site.xml] 54 | 2022-10-03 16:41:15,135 [main] INFO s3a.DiagnosticsAWSCredentialsProvider (DiagnosticsAWSCredentialsProvider.java:printSecretOption(135)) 55 | - Option fs.s3a.secret.key = "Bq**********************************dfix" [40] BAA1DCAB58875154AA0B77A000000E0 from [core-site.xml] 56 | 2022-10-03 16:41:15,135 [main] INFO s3a.DiagnosticsAWSCredentialsProvider (DiagnosticsAWSCredentialsProvider.java:printSecretOption(138)) - 57 | Option fs.s3a.session.token unset 58 | ``` 59 | 60 | -------------------------------------------------------------------------------- /src/main/site/mkbucket.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `mkbucket` 16 | 17 | Creates a bucket. 18 | 19 | Usage 20 | ``` 21 | Usage: mkbucket <region> <s3a-bucket-url> 22 | ``` 23 | 24 | ```bash 25 | hadoop jar cloudstore-1.0.jar mkbucket us-east-2 s3a://new-bucket-name/ 26 | ``` 27 | 28 | The per-bucket settings of the target bucket name are used to create the bucket, 29 | for example the endpoint and login details. 30 | However, if you are attempting complex configurations, e.g. creating buckets for 31 | a different account, it is a lot safer to set the base configuration options. 32 | -------------------------------------------------------------------------------- /src/main/site/mkcsv.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `mkcsv` 16 | 17 | Creates a CSV file at a given path; useful for scale testing CSV processing. 18 | 19 | ```bash 20 | hadoop jar cloudstore-1.0.jar mkcsv -header -quote -verbose 10000 s3a://bucket/file.csv 21 | ``` 22 | 23 | The format is a variable width sequence, with entries cross referencing each other for validation.
24 | ```csv 25 | "start","rowId","length","dataCrc","data","rowId2","rowCrc","end" 26 | "start","1","87","691051183","bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb","1","2707924207","end" 27 | "start","2","40","2886466480","cccccccccccccccccccccccccccccccccccccccc","2","2141198053","end" 28 | "start","3","98","3320970725","dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd","3","4203069111","end" 29 | "start","4","8","1257926895","eeeeeeee","4","189792478","end" 30 | "start","5","25","1630497970","fffffffffffffffffffffffff","5","1034603103","end" 31 | "start","6","38","557554018","gggggggggggggggggggggggggggggggggggggg","6","1412646710","end" 32 | "start","7","86","951894681","hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh","7","2062289315","end" 33 | "start","8","45","3065088391","iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii","8","3774714774","end" 34 | "start","9","70","2839984696","jjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjjj","9","303056462","end" 35 | ``` 36 | 37 | ## Invariants 38 | 39 | For each row 40 | ```java 41 | start == "start" 42 | rowId == rowId2 43 | length == a random int >= 0 44 | data = string where data.length() == length 45 | elements of data == char c where c in "[a-z][A-Z][0-9]" 46 | dataCrc == new CRC32().update(data.getBytes(StandardCharsets.UTF_8)) 47 | rowCrC == crc32 of all previous fields, including quotes, *excluding separators* 48 | end == "end" 49 | // and ignoring headers 50 | forall n: row[n].rowID == n 51 | ``` 52 | 53 | 54 | ## Schemas for Apache Spark 55 | ```scala 56 | 57 | /** 58 | * Dataset class. 59 | * Latest build is "start","rowId","length","dataCrc","data","rowId2","rowCrc","end" 60 | */ 61 | case class CsvRecord( 62 | start: String, 63 | rowId: Long, 64 | length: Long, 65 | dataCrc: Long, 66 | data: String, 67 | rowId2: Long, 68 | rowCrc: Long, 69 | end: String) 70 | 71 | /** 72 | * The StructType of the CSV data. 73 | * "start","rowId","length","dataCrc","data","rowId2","rowCrc","end" 74 | */ 75 | val csvSchema: StructType = { 76 | new StructType(). 77 | add("start", StringType). 78 | add("rowId", LongType). 79 | add("length", LongType). 80 | add("dataCrc", LongType). 81 | add("data", StringType). 82 | add("rowId2", LongType). 83 | add("rowCrc", LongType). 84 | add("end", StringType) 85 | } 86 | 87 | ``` -------------------------------------------------------------------------------- /src/main/site/safeprefetch.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `safeprefetch`: validate prefetch safety of abfs client 16 | 17 | Command to probe an abfs store for prefetch safety. 18 | 19 | If safe: returns a status code of 0 20 | 21 | If unsafe, prints configuration options to disable prefetching 22 | and then returns the exit code -1. 23 | 24 | The safety probe considers an abfs store safe if *any* of the conditions are met 25 | 26 | * It is from a release *before* `HADOOP-17156. Clear abfs readahead requests on stream close`, 27 | * It has the fix `HADOOP-18546. ABFS. disable purging list of in progress reads in abfs stream close()` 28 | * The `fs.azure.readaheadqueue.depth` is 0 29 | * Cloudera releases: readahead has been completely disabled 30 | (`HADOOP-18517. ABFS: Add fs.azure.enable.readahead option to disable readahead` is in all CDH releases with the bug) 31 | 32 | The probe for the fix relies on `HADOOP-18577. 
ABFS: Add probes of readahead fix`; a pathcapabilities probe 33 | `fs.azure.capability.readahead.safe` has been added to all abfs releases with the fix. 34 | 35 | ## Example, probe of a (safe) hadoop 3.3.5 36 | 37 | 38 | ``` 39 | bin/hadoop jar $CLOUDSTORE safeprefetch abfs://stevel-testing@stevelukwest.dfs.core.windows.net/user 40 | 41 | Probing abfs://stevel-testing@stevelukwest.dfs.core.windows.net/user for prefetch safety 42 | Using filesystem abfs://stevel-testing@stevelukwest.dfs.core.windows.net 43 | Filesystem abfs://stevel-testing@stevelukwest.dfs.core.windows.net/user has prefetch issue fixed (has path capability fs.azure.capability.readahead.safe) 44 | ``` 45 | 46 | ## Example, probe of an (unsafe) hadoop 3.3.4 47 | 48 | ``` 49 | bin/hadoop jar $CLOUDSTORE safeprefetch abfs://stevel-testing@stevelukwest.dfs.core.windows.net/user 50 | Probing abfs://stevel-testing@stevelukwest.dfs.core.windows.net/user for prefetch safety 51 | Using filesystem abfs://stevel-testing@stevelukwest.dfs.core.windows.net 52 | Store is vulnerable to inconsistent prefetching. This MUST be disabled 53 | 54 | WARNING: Filesystem is vulnerable until prefetching is disabled 55 | hadoop XML: 56 | 57 | <property><name>fs.azure.readaheadqueue.depth</name> 58 | <value>0</value> 59 | </property> 60 | 61 | 62 | 63 | 64 | spark: 65 | spark.hadoop.fs.azure.readaheadqueue.depth 0 66 | 67 | 68 | 2022-12-19 12:32:06,003 [main] INFO util.ExitUtil (ExitUtil.java:terminate(241)) - Exiting with status -1: 69 | 70 | ``` 71 | 72 | -------------------------------------------------------------------------------- /src/main/site/sessionkey.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # sessionkeys 16 | 17 | Generates a set of session keys from the AWS credentials used to log in to a bucket; 18 | prints them as: XML, bash env vars, fish env vars, key=val properties. 19 | For the XML and properties output, it also prints the credential provider option 20 | needed to use temporary credentials. 21 | 22 | Validity: 36h 23 | 24 | This is to aid with generating temp keys to use with throwaway test clusters 25 | that may be shared with colleagues; it avoids having to hand out longer-lived account secrets. 26 | 27 | *Note*: only available on cloudstore builds with the "extra" profile enabled; and when 28 | executed against a version of Hadoop (3.2+) which provides API access to the credential 29 | chain.
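
A typical workflow (illustrative; the file names here are placeholders) is to run the command, copy the `Bash` section of the report into a file, and source it before running other AWS-aware tools:

```bash
# run against the target bucket, saving the full report
bin/hadoop jar $CLOUDSTORE sessionkeys s3a://example-bucket/ > session-report.txt

# copy the "Bash" section of the report into session.env, then:
source session.env
env | grep AWS_   # AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN now set
```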
30 | 31 | ``` 32 | bin/hadoop jar $CLOUDSTORE sessionkeys s3a://landsat-pds/ 33 | 2020-08-25 14:02:36,996 [main] INFO extra.SessionKeys (DurationInfo.java:<init>(53)) - Starting: session 34 | 2020-08-25 14:02:40,295 [main] INFO extra.STSClientFactory2 (STSClientFactory2.java:lambda$requestSessionCredentials$0(146)) - 35 | Requesting Amazon STS Session credentials 36 | 37 | XML settings 38 | ============ 39 | 40 | <property><name>fs.s3a.access.key</name> 41 | <value>ASIASHFDIJDQGFIYYOJ7V</value> 42 | </property> 43 | <property><name>fs.s3a.secret.key</name> 44 | <value>ApNyF4qyAFupyypY2aB/QZxyCVNb</value> 45 | </property> 46 | <property><name>fs.s3a.session.token</name> 47 | <value>ABCDEF00000000000=</value> 48 | </property> 49 | <property><name>fs.s3a.aws.credentials.provider</name> 50 | <value>org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider</value> 51 | </property> 52 | <property><name>fs.s3a.bucket.landsat-pds.endpoint</name> 53 | <value>s3.amazonaws.com</value> 54 | </property> 55 | 56 | 57 | Properties 58 | ========== 59 | 60 | fs.s3a.aws.credentials.provider=org.apache.hadoop.fs.s3a.TemporaryAWSCredentialsProvider 61 | fs.s3a.access.key=ASIASHFDIJDQGFIYYOJ7V 62 | fs.s3a.secret.key=ApNyF4qyAFupyypY2aB/QZxyCVNb 63 | fs.s3a.session.token=ABCDEF00000000000= 64 | fs.s3a.bucket.landsat-pds.endpoint=s3.amazonaws.com 65 | 66 | 67 | Bash 68 | ==== 69 | 70 | export AWS_ACCESS_KEY_ID=ASIASHFDIJDQGFIYYOJ7V 71 | export AWS_SECRET_ACCESS_KEY=ApNyF4qyAFupyypY2aB/QZxyCVNb 72 | export AWS_SESSION_TOKEN=ABCDEF00000000000= 73 | 74 | 75 | Fish 76 | ==== 77 | 78 | set -gx AWS_ACCESS_KEY_ID ASIASHFDIJDQGFIYYOJ7V 79 | set -gx AWS_SECRET_ACCESS_KEY ApNyF4qyAFupyypY2aB/QZxyCVNb 80 | set -gx AWS_SESSION_TOKEN ABCDEF00000000000= 81 | 82 | 2020-08-25 14:02:40,840 [main] INFO extra.SessionKeys (DurationInfo.java:close(100)) - session: duration 0:03:849 83 | ``` -------------------------------------------------------------------------------- /src/main/site/tarhardened.md: -------------------------------------------------------------------------------- 1 | 14 | 15 | # Command `tarhardened` 16 | 17 | Verify that the hadoop release has had its untar command hardened and will 18 | not evaluate commands passed in as filenames. 19 | 20 | ```bash 21 | bin/hadoop jar $CLOUDSTORE tarhardened "file.tar; true" 22 | ``` 23 | 24 | *Bad* 25 | 26 | ``` 27 | Attempting to untar file with name "file.tar; true" 28 | untar operation reported success 29 | 30 | 2023-01-27 16:42:35,931 [main] INFO util.ExitUtil (ExitUtil.java:terminate(124)) - Exiting with status 0 31 | ``` 32 | 33 | Although the file doesn't exist, the bash "true" command was executed after the untar, so 34 | the operation was reported as a success. 35 | 36 | *Good* 37 | 38 | ``` 39 | 2023-01-27 16:48:44,461 [main] INFO util.ExitUtil (ExitUtil.java:terminate(210)) - Exiting with status -1: ExitCodeException exitCode=1: tar: Error opening archive: Failed to open 'file.tar; true' 40 | 41 | ``` 42 | 43 | An attempt was made to open the file `file.tar; true`; as it is not present, the operation failed. 44 | Expect a stack trace in the report. 45 | -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/fs/s3a/cloudup/ITestS3ACloudup.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.s3a.cloudup; 20 | 21 | import java.io.File; 22 | 23 | import org.junit.After; 24 | import org.junit.Before; 25 | import org.junit.BeforeClass; 26 | import org.junit.Test; 27 | import org.slf4j.Logger; 28 | import org.slf4j.LoggerFactory; 29 | 30 | import org.apache.hadoop.fs.FileUtil; 31 | import org.apache.hadoop.fs.Path; 32 | import org.apache.hadoop.fs.store.test.AbstractS3AStoreTest; 33 | import org.apache.hadoop.fs.tools.cloudup.Cloudup; 34 | import org.apache.hadoop.tools.store.StoreTestUtils; 35 | 36 | import static org.apache.hadoop.fs.contract.ContractTestUtils.cleanup; 37 | import static org.apache.hadoop.tools.store.StoreTestUtils.createTestDir; 38 | import static org.apache.hadoop.tools.store.StoreTestUtils.createTestFiles; 39 | import static org.apache.hadoop.tools.store.StoreTestUtils.expectSuccess; 40 | 41 | /** 42 | * As the S3A test base isn't available, do enough to make it look 43 | * like it is, to ease later merge. 44 | */ 45 | public class ITestS3ACloudup extends AbstractS3AStoreTest { 46 | protected static final Logger LOG = 47 | LoggerFactory.getLogger(ITestS3ACloudup.class); 48 | private Path root; 49 | private Path testPath; 50 | 51 | 52 | private static File testDirectory; 53 | private File methodDir; 54 | private File sourceDir; 55 | 56 | @BeforeClass 57 | public static void classSetup() throws Exception { 58 | testDirectory = createTestDir(); 59 | } 60 | 61 | @Before 62 | public void setup() throws Exception { 63 | super.setup(); 64 | root = new Path(getFileSystem().getUri()); 65 | testPath = new Path(root, "/ITestS3ACloudup"); 66 | 67 | methodDir = new File(testDirectory, methodName.getMethodName()); 68 | StoreTestUtils.mkdirs(methodDir); 69 | sourceDir = new File(methodDir, "src"); 70 | FileUtil.fullyDelete(sourceDir); 71 | } 72 | 73 | 74 | @After 75 | public void teardown() throws Exception { 76 | if (methodDir != null) { 77 | FileUtil.fullyDelete(methodDir); 78 | } 79 | cleanup("TEARDOWN", getFileSystem(), testPath); 80 | } 81 | 82 | @Test 83 | public void testUpload() throws Throwable { 84 | Path dest = methodPath(); 85 | int expected = createTestFiles(sourceDir, 256); 86 | expectSuccess( 87 | new Cloudup(), 88 | "-s", sourceDir.toURI().toString(), 89 | "-d", dest.toUri().toString(), 90 | "-t", "16", 91 | "-o", 92 | "-l", "3"); 93 | 94 | } 95 | 96 | public Path methodPath() { 97 | return new Path(testPath, methodName.getMethodName()); 98 | } 99 | 100 | } 101 | -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/fs/store/commands/FieldsForTesting.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. 
The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.commands; 20 | 21 | public class FieldsForTesting { 22 | 23 | public static final String FIELDS = FieldsForTesting.class.getName(); 24 | public static final int INT = 1; 25 | public static final boolean BOOL = true; 26 | 27 | 28 | public static final String NULLSTR = null; 29 | 30 | private static final String privateString = "private"; 31 | 32 | private final String notStatic = "notStatic"; 33 | } 34 | -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/fs/store/commands/TestConstval.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package org.apache.hadoop.fs.store.commands;
36 | 37 | import org.assertj.core.api.Assertions; 38 | import org.junit.Test; 39 | 40 | import static org.apache.hadoop.fs.store.StoreExitCodes.E_NOT_FOUND; 41 | import static org.apache.hadoop.fs.store.StoreExitCodes.E_NOT_FOUND2; 42 | import static org.apache.hadoop.fs.store.StoreExitCodes.E_USAGE; 43 | import static org.apache.hadoop.fs.store.commands.Constval.lookupFieldValue; 44 | import static org.apache.hadoop.fs.store.commands.FieldsForTesting.FIELDS; 45 | import static org.apache.hadoop.tools.store.StoreTestUtils.expectExitException; 46 | 47 | public class TestConstval { 48 | 49 | 50 | public static final String CONSTVAL = "org.apache.hadoop.fs.store.commands.Constval"; 51 | 52 | private static void expectValue(String classname, String field, String expected) { 53 | String value = lookupFieldValue(classname, field); 54 | Assertions.assertThat(value) 55 | .describedAs("Field " + field + " in " + classname) 56 | .isEqualTo(expected); 57 | } 58 | @Test 59 | public void testRun() { 60 | } 61 | 62 | @Test 63 | public void testLookupString() { 64 | expectValue(CONSTVAL, "USAGE", Constval.USAGE); 65 | } 66 | @Test 67 | public void testLookupInt() { 68 | expectValue(FIELDS, "INT", FieldsForTesting.INT + ""); 69 | } 70 | 71 | @Test 72 | public void testLookupBool() { 73 | expectValue(FIELDS, "BOOL", FieldsForTesting.BOOL + ""); 74 | } 75 | 76 | 77 | 78 | @Test 79 | public void testLookupNull() { 80 | expectValue(FIELDS, "NULLSTR", Constval.NULL); 81 | } 82 | 83 | @Test 84 | public void testMissingClass() throws Exception { 85 | expectExitException(E_NOT_FOUND, 86 | () -> lookupFieldValue(CONSTVAL + "2", "not_found")); 87 | } 88 | 89 | @Test 90 | public void testMissingField() throws Exception { 91 | expectExitException(E_NOT_FOUND2, 92 | () -> lookupFieldValue(CONSTVAL, "not_found")); 93 | } 94 | 95 | @Test 96 | public void testExecNoArgs() throws Exception { 97 | expectExitException(E_USAGE, 98 | () -> Constval.exec()); 99 | } 100 | @Test 101 | public void testExecBool() throws Exception { 102 | Constval.exec(FIELDS, "BOOL"); 103 | } 104 | 105 | } -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/fs/store/logging/TestLog4JController.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License.
17 | */ 18 | 19 | package org.apache.hadoop.fs.store.logging; 20 | 21 | import java.util.Optional; 22 | 23 | import org.assertj.core.api.Assertions; 24 | import org.junit.Test; 25 | import org.slf4j.Logger; 26 | import org.slf4j.LoggerFactory; 27 | 28 | public class TestLog4JController { 29 | 30 | private static final Logger LOG = LoggerFactory.getLogger(TestLog4JController.class); 31 | 32 | @Test 33 | public void testCreateLogger() throws Throwable { 34 | Optional<LogControl> control = 35 | LogControllerFactory.createController(LogControllerFactory.LOG4J); 36 | Assertions.assertThat(control) 37 | .describedAs("created controller") 38 | .isPresent() 39 | .containsInstanceOf(Log4JController.class); 40 | } 41 | 42 | @Test 43 | public void testConfigureLevel() throws Throwable { 44 | LogControl control = 45 | LogControllerFactory.createController(LogControllerFactory.LOG4J).get(); 46 | final String name = this.getClass().getName(); 47 | control.setLogLevel(name, LogControl.LogLevel.DEBUG); 48 | LOG.debug("debug at debug level"); 49 | control.setLogLevel(name, LogControl.LogLevel.INFO); 50 | LOG.info("info at info level"); 51 | LOG.debug("debug at info level"); 52 | 53 | LOG.info("switching back to debug"); 54 | control.setLogLevel(name, LogControl.LogLevel.DEBUG); 55 | LOG.debug("debug at debug level again"); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/test/java/org/apache/hadoop/tools/store/TestDiagUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License.
17 | */ 18 | 19 | package org.apache.hadoop.tools.store; 20 | 21 | import java.util.regex.Matcher; 22 | import java.util.regex.Pattern; 23 | 24 | import org.junit.Test; 25 | 26 | import org.apache.hadoop.fs.store.diag.DiagUtils; 27 | 28 | import static org.assertj.core.api.Assertions.assertThat; 29 | 30 | public class TestDiagUtils { 31 | final Pattern ipv4 = DiagUtils.ipV4pattern(); 32 | 33 | @Test 34 | public void testIPV4Match() throws Throwable { 35 | assertMatches("0.0.0.0", true); 36 | assertMatches("10.0.1.255", true); 37 | assertMatches("s3.amazonaws.com", false); 38 | assertMatches("10", false); 39 | 40 | } 41 | 42 | private void assertMatches(final String input, final boolean expected) { 43 | final Matcher matcher = ipv4.matcher(input); 44 | assertThat(matcher.matches()) 45 | .describedAs("Match against %s", input) 46 | .isEqualTo(expected); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/test/resources/core-site.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 20 | 21 | 22 | 23 | 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /src/test/resources/logback-test.xml: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | 3 | 24 | <configuration> 25 | 26 | <appender name="stdout" class="ch.qos.logback.core.ConsoleAppender"> 27 | <encoder> 28 | <pattern>%d{ISO8601} [%t] %-5p %c{2} (%F:%M(%L)) - %m%n</pattern> 29 | </encoder> 30 | </appender> 31 | 32 | <root level="INFO"> 33 | <appender-ref ref="stdout"/> 34 | </root> 35 | </configuration> 36 | --------------------------------------------------------------------------------