stringIterator = iterators.get(0);
52 |
53 | return stringIterator.next();
54 | }
55 |
56 | @Override
57 | public void remove() {
58 | throw new UnsupportedOperationException();
59 | }
60 |
61 | @Override
62 | public void forEachRemaining(Consumer<? super T> action) {
63 | throw new UnsupportedOperationException();
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/GoogleStorageWagon/deploy/gcs_repository_bucket.tf:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | variable "bucket_name" {
18 | }
19 |
20 | variable "location" {
21 | default = "europe-west2"
22 | }
23 |
24 | variable "project" {
25 | }
26 |
27 | provider "google" {
28 | project = "${var.project}"
29 | }
30 |
31 | resource "google_storage_bucket" "cloud_storage_maven_repo" {
32 | name = "${var.bucket_name}"
33 | location = "${var.location}"
34 |
35 | project = "${var.project}"
36 | }
37 |
38 | resource "google_project_iam_custom_role" "cloud_storage_maven_role" {
39 | role_id = "${replace(var.bucket_name,"-","_")}Role"
40 | title = "${var.bucket_name}Role"
41 | description = "Cloud Storage Maven Repository Bucket Role"
42 | permissions = ["storage.objects.create","storage.objects.get","storage.objects.list","storage.objects.delete"]
43 | }
44 |
45 | resource "google_service_account" "cloud_storage_service_account" {
46 | account_id = "${var.bucket_name}-sa"
47 | display_name = "${var.bucket_name}ServiceAccount"
48 | }
49 |
50 | resource "google_storage_bucket_iam_binding" "cloud_storage_bucket_service_account_iam_binding" {
51 | bucket = "${google_storage_bucket.cloud_storage_maven_repo.id}"
52 | members = [
53 | "serviceAccount:${google_service_account.cloud_storage_service_account.email}"
54 | ]
55 | role = "${google_project_iam_custom_role.cloud_storage_maven_role.id}"
56 | }
57 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/wagon/PublicReadProperty.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.wagon;
18 |
19 | public class PublicReadProperty {
20 |
21 | private static final String PUBLIC_REPOSITORY_PROP_TAG = "publicRepository";
22 | private static final String PUBLIC_REPOSITORY_ENV_TAG = "PUBLIC_REPOSITORY";
23 |
24 | private Boolean publicRepository;
25 |
26 | /**
27 | *
28 | * @param publicRepository may be null
29 | */
30 | public PublicReadProperty(Boolean publicRepository) {
31 | this.publicRepository = publicRepository;
32 | }
33 |
34 | /**
35 | * @return the value passed to the constructor if not null; otherwise the {@code publicRepository} system property, then the {@code PUBLIC_REPOSITORY} environment variable, and {@code false} if neither is set
36 | */
37 | public boolean get() {
38 | if (publicRepository != null){
39 | return publicRepository;
40 | }
41 |
42 | String publicRepositoryProp = System.getProperty(PUBLIC_REPOSITORY_PROP_TAG);
43 | if(publicRepositoryProp != null) {
44 | return Boolean.valueOf(publicRepositoryProp);
45 | }
46 |
47 | String publicRepositoryEnv = System.getenv(PUBLIC_REPOSITORY_ENV_TAG);
48 | if(publicRepositoryEnv!= null) {
49 | return Boolean.valueOf(publicRepositoryEnv);
50 | }
51 |
52 | return false;
53 | }
54 |
55 | }
56 |
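For illustration, a minimal sketch of the resolution order implemented above; in normal use the wagon constructs this object itself, and the property value set here is only a placeholder.

import com.gkatzioura.maven.cloud.wagon.PublicReadProperty;

public class PublicReadPropertyExample {

    public static void main(String[] args) {
        // An explicit constructor value always wins.
        System.out.println(new PublicReadProperty(true).get());  // true

        // With null, the "publicRepository" system property is consulted,
        // then the PUBLIC_REPOSITORY environment variable, and finally false.
        System.setProperty("publicRepository", "true");
        System.out.println(new PublicReadProperty(null).get());  // true
    }
}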
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/transfer/TransferProgressFileOutputStream.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.transfer;
18 |
19 | import java.io.File;
20 | import java.io.FileNotFoundException;
21 | import java.io.FileOutputStream;
22 | import java.io.IOException;
23 |
24 | public final class TransferProgressFileOutputStream extends FileOutputStream {
25 |
26 | private final TransferProgress transferProgress;
27 |
28 | public TransferProgressFileOutputStream(File file, TransferProgress transferProgress) throws FileNotFoundException {
29 | super(file);
30 | this.transferProgress = transferProgress;
31 | }
32 |
33 | @Override
34 | public void write(int b) throws IOException {
35 | super.write(b);
36 | this.transferProgress.progress(new byte[]{(byte) b}, 1);
37 | }
38 |
39 | @Override
40 | public void write(byte b[]) throws IOException {
41 | super.write(b);
42 | this.transferProgress.progress(b, b.length);
43 | }
44 |
45 | @Override
46 | public void write(byte b[], int off, int len) throws IOException {
47 | super.write(b, off, len);
48 | if (off == 0) {
49 | this.transferProgress.progress(b, len);
50 | } else {
51 | byte[] bytes = new byte[len];
52 | System.arraycopy(b, off, bytes, 0, len);
53 | this.transferProgress.progress(bytes, len);
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/AzureStorageWagon/deploy/azure_repository_storage.tf:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | variable "storage_account_name" {
18 | }
19 |
20 | variable "location" {
21 | default = "westeurope"
22 | }
23 |
24 | variable "resource_group_name" {
25 | default = "CloudStorageMaven"
26 | }
27 |
28 | resource "azurerm_resource_group" "cloud_storage_maven_resource_group" {
29 | name = "${var.resource_group_name}"
30 | location = "${var.location}"
31 | }
32 |
33 | resource "azurerm_storage_account" "cloud_storage_maven_storage_account" {
34 | name = "${var.storage_account_name}"
35 | resource_group_name = "${azurerm_resource_group.cloud_storage_maven_resource_group.name}"
36 | location = "${var.location}"
37 | account_tier = "Standard"
38 | account_replication_type = "LRS"
39 | }
40 |
41 | resource "azurerm_storage_container" "cloud_storage_maven_storage_account_snapshot_container" {
42 | name = "snapshot"
43 | resource_group_name = "${var.resource_group_name}"
44 | storage_account_name = "${azurerm_storage_account.cloud_storage_maven_storage_account.name}"
45 | container_access_type = "private"
46 | }
47 |
48 | resource "azurerm_storage_container" "cloud_storage_maven_storage_account_release_container" {
49 | name = "release"
50 | resource_group_name = "${var.resource_group_name}"
51 | storage_account_name = "${azurerm_storage_account.cloud_storage_maven_storage_account.name}"
52 | container_access_type = "private"
53 | }
54 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/test/java/com/gkatzioura/maven/cloud/transfer/KeyResolverTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.transfer;
18 |
19 | import org.junit.Assert;
20 | import org.junit.Test;
21 |
22 | import com.gkatzioura.maven.cloud.resolver.KeyResolver;
23 |
24 | public class KeyResolverTest {
25 |
26 | @Test
27 | public void resolveSlashDirectories() {
28 |
29 | KeyResolver keyResolver = new KeyResolver();
30 | String directoryJoin = keyResolver.resolve("/t/","/tesanother/key/");
31 | Assert.assertEquals("t/tesanother/key", directoryJoin);
32 | }
33 |
34 | @Test
35 | public void resolveEmptyBaseDirectory() {
36 |
37 | KeyResolver keyResolver = new KeyResolver();
38 | String directoryJoin = keyResolver.resolve("","/tesanother/key/");
39 | Assert.assertEquals("tesanother/key", directoryJoin);
40 | }
41 |
42 | @Test
43 | public void testResolveSimple() {
44 |
45 | KeyResolver keyResolver = new KeyResolver();
46 | String directoryJoin = keyResolver.resolve("/tesanother/key/");
47 | Assert.assertEquals("tesanother/key", directoryJoin);
48 | }
49 |
50 | @Test
51 | public void testResolveConcat() {
52 |
53 | KeyResolver keyResolver = new KeyResolver();
54 | String directoryJoin = keyResolver.resolve("test-repo/release/production/", "/tesanother/key");
55 | Assert.assertEquals("test-repo/release/production/tesanother/key", directoryJoin);
56 | }
57 |
58 | }
59 |
--------------------------------------------------------------------------------
/S3StorageWagon/src/test/java/com/gkatzioura/maven/cloud/s3/S3StorageRegionProviderChainTest.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.s3;
2 |
3 | import com.amazonaws.regions.AwsEnvVarOverrideRegionProvider;
4 | import org.junit.Assert;
5 | import org.junit.Test;
6 | import org.junit.runner.RunWith;
7 | import org.powermock.core.classloader.annotations.PrepareForTest;
8 | import org.powermock.modules.junit4.PowerMockRunner;
9 |
10 | import static com.amazonaws.SDKGlobalConfiguration.AWS_REGION_ENV_VAR;
11 | import static com.amazonaws.SDKGlobalConfiguration.AWS_REGION_SYSTEM_PROPERTY;
12 | import static org.powermock.api.mockito.PowerMockito.*;
13 |
14 | @RunWith(PowerMockRunner.class)
15 | public class S3StorageRegionProviderChainTest {
16 |
17 | private final static String PROVIDED_REGION = "provided-region";
18 | private final static String ENV_VAR_REGION = "env-var-region";
19 | private final static String SYSTEM_PROPERTY_REGION = "sys-prop-region";
20 |
21 | @Test
22 | public void testProvidedRegionConstructor() {
23 | final S3StorageRegionProviderChain regionProvider = new S3StorageRegionProviderChain(PROVIDED_REGION);
24 | Assert.assertEquals(PROVIDED_REGION, regionProvider.getRegion());
25 | }
26 |
27 | @Test
28 | @PrepareForTest(AwsEnvVarOverrideRegionProvider.class)
29 | public void testEnvVarRegion() {
30 | mockStatic(System.class);
31 | when(System.getenv(AWS_REGION_ENV_VAR)).thenReturn(ENV_VAR_REGION);
32 |
33 | final S3StorageRegionProviderChain regionProvider = new S3StorageRegionProviderChain();
34 | Assert.assertEquals(ENV_VAR_REGION, regionProvider.getRegion());
35 | }
36 |
37 | @Test
38 | @PrepareForTest(AwsDefaultEnvRegionProvider.class)
39 | public void testDefaultEnvVarRegion() {
40 | mockStatic(System.class);
41 | when(System.getenv("AWS_DEFAULT_REGION")).thenReturn(ENV_VAR_REGION);
42 |
43 | final S3StorageRegionProviderChain regionProvider = new S3StorageRegionProviderChain();
44 | Assert.assertEquals(ENV_VAR_REGION, regionProvider.getRegion());
45 | }
46 |
47 | @Test
48 | public void testSystemPropertyRegion() {
49 | System.setProperty(AWS_REGION_SYSTEM_PROPERTY, SYSTEM_PROPERTY_REGION);
50 |
51 | final S3StorageRegionProviderChain regionProvider = new S3StorageRegionProviderChain();
52 | Assert.assertEquals(SYSTEM_PROPERTY_REGION, regionProvider.getRegion());
53 |
54 | System.clearProperty(AWS_REGION_SYSTEM_PROPERTY);
55 | }
56 |
57 | }
58 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/test/java/com/gkatzioura/maven/cloud/transfer/BaseDirectoryResolverTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.transfer;
18 |
19 | import org.apache.maven.wagon.repository.Repository;
20 | import org.junit.Assert;
21 | import org.junit.Test;
22 |
23 | import com.gkatzioura.maven.cloud.resolver.BaseDirectoryResolver;
24 |
25 | public class BaseDirectoryResolverTest {
26 |
27 | @Test
28 | public void testResolveEmptySlash() {
29 |
30 | BaseDirectoryResolver directoryResolver = new BaseDirectoryResolver();
31 | Repository repository = new Repository("test-repo","gs://test-repo/");
32 | String baseDirectory = directoryResolver.resolve(repository);
33 | Assert.assertTrue(baseDirectory.isEmpty());
34 | }
35 |
36 | @Test
37 | public void testResolveEmpty() {
38 |
39 | BaseDirectoryResolver directoryResolver = new BaseDirectoryResolver();
40 | Repository repository = new Repository("test-repo","gs://test-repo");
41 | String baseDirectory = directoryResolver.resolve(repository);
42 | Assert.assertTrue(baseDirectory.isEmpty());
43 | }
44 |
45 | @Test
46 | public void testResolve() {
47 |
48 | BaseDirectoryResolver directoryResolver = new BaseDirectoryResolver();
49 | Repository repository = new Repository("test-repo","gs://test-repo/release");
50 | String baseDirectory = directoryResolver.resolve(repository);
51 | Assert.assertEquals("release/",baseDirectory);
52 | }
53 |
54 | @Test
55 | public void testResolveSubDirectory() {
56 |
57 | BaseDirectoryResolver directoryResolver = new BaseDirectoryResolver();
58 | Repository repository = new Repository("test-repo","gs://test-repo/release/production");
59 | String baseDirectory = directoryResolver.resolve(repository);
60 | Assert.assertEquals("release/production/",baseDirectory);
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/AzureStorageWagon/src/main/java/com/gkatzioura/maven/cloud/abs/ConnectionStringFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.abs;
18 |
19 | import java.util.logging.Logger;
20 |
21 | import org.apache.maven.wagon.authentication.AuthenticationException;
22 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
23 |
24 | public class ConnectionStringFactory {
25 |
26 | private static final String CONNECTION_STRING_TEMPLATE = "DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s;EndpointSuffix=core.windows.net";
27 |
28 | private static final Logger LOGGER = Logger.getLogger(ConnectionStringFactory.class.getName());
29 |
30 | public String create(AuthenticationInfo authenticationInfo) throws AuthenticationException {
31 |
32 | if(authenticationInfo==null) {
33 | throw new AuthenticationException("Please provide storage account credentials");
34 | }
35 |
36 | String username = authenticationInfo.getUserName();
37 | String password = authenticationInfo.getPassword();
38 |
39 | if (username == null || username.isEmpty()) {
40 | return password;
41 | } else {
42 | return String.format(CONNECTION_STRING_TEMPLATE, username, password);
43 | }
44 | }
45 |
46 | /**
47 | * Creates the connection string from the ACCOUNT_NAME and ACCOUNT_KEY environment variables.
48 | * @return the Azure storage connection string
49 | * @throws AuthenticationException if either environment variable is missing
50 | */
51 | public String create() throws AuthenticationException {
52 | String accountName = System.getenv("ACCOUNT_NAME");
53 | String accountKey = System.getenv("ACCOUNT_KEY");
54 |
55 | if(accountName ==null || accountKey == null) {
56 | throw new AuthenticationException("Please provide storage account credentials using environmental variables");
57 | }
58 |
59 | return String.format(CONNECTION_STRING_TEMPLATE,accountName,accountKey);
60 | }
61 |
62 | }
63 |
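A short usage sketch of the two create variants above; the account name and key below are placeholders, not real credentials.

import org.apache.maven.wagon.authentication.AuthenticationException;
import org.apache.maven.wagon.authentication.AuthenticationInfo;

import com.gkatzioura.maven.cloud.abs.ConnectionStringFactory;

public class ConnectionStringExample {

    public static void main(String[] args) throws AuthenticationException {
        ConnectionStringFactory factory = new ConnectionStringFactory();

        // Username/password (e.g. from a settings.xml server entry) map to AccountName/AccountKey.
        AuthenticationInfo authenticationInfo = new AuthenticationInfo();
        authenticationInfo.setUserName("mystorageaccount");   // placeholder account name
        authenticationInfo.setPassword("c3RvcmFnZS1rZXk=");   // placeholder account key
        System.out.println(factory.create(authenticationInfo));

        // The no-argument variant reads ACCOUNT_NAME and ACCOUNT_KEY from the environment.
        System.out.println(factory.create());
    }
}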
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/CredentialsFactory.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.s3;
18 |
19 | import java.util.logging.Logger;
20 |
21 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
22 |
23 | import com.amazonaws.auth.AWSCredentialsProvider;
24 | import com.amazonaws.auth.AWSStaticCredentialsProvider;
25 | import com.amazonaws.auth.BasicAWSCredentials;
26 | import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
27 |
28 | public class CredentialsFactory {
29 |
30 | private static final Logger LOGGER = Logger.getLogger(CredentialsFactory.class.getName());
31 |
32 | /**
33 | * Creates an {@link AWSCredentialsProvider} from the passed {@link AuthenticationInfo}. This should contain the
34 | * username and password used to authenticate when connecting to AWS .
35 | * When {@code authenticationInfo} is passed as {@code null}, a {@link DefaultAWSCredentialsProviderChain} will be
36 | * used. This is an authentication provider that gets the credentials from Java environment properties, system
37 | * environment variables or other global locations. See the {@link DefaultAWSCredentialsProviderChain} documentation
38 | * for details.
39 | *
40 | * @param authenticationInfo an {@link AuthenticationInfo} containing the AWS credentials to use
41 | * @return a newly-built {@link AWSCredentialsProvider} with the credentials associated to the passed
42 | * {@code authenticationInfo}
43 | */
44 | public AWSCredentialsProvider create(AuthenticationInfo authenticationInfo) {
45 | if(authenticationInfo==null) {
46 | return new DefaultAWSCredentialsProviderChain();
47 | } else {
48 | LOGGER.info("Using static credentials provider");
49 | return new AWSStaticCredentialsProvider(new BasicAWSCredentials(authenticationInfo.getUserName(),authenticationInfo.getPassword()));
50 | }
51 | }
52 | }
53 |
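A brief sketch of the two branches described in the javadoc above; the access key id and secret are placeholders.

import org.apache.maven.wagon.authentication.AuthenticationInfo;

import com.amazonaws.auth.AWSCredentialsProvider;
import com.gkatzioura.maven.cloud.s3.CredentialsFactory;

public class CredentialsFactoryExample {

    public static void main(String[] args) {
        CredentialsFactory credentialsFactory = new CredentialsFactory();

        // Null authentication info falls back to the DefaultAWSCredentialsProviderChain
        // (environment variables, system properties, profile files, instance profiles).
        AWSCredentialsProvider chainProvider = credentialsFactory.create(null);

        // Explicit credentials, e.g. from a settings.xml server entry, become a static provider.
        AuthenticationInfo authenticationInfo = new AuthenticationInfo();
        authenticationInfo.setUserName("AKIAEXAMPLEKEYID");   // placeholder access key id
        authenticationInfo.setPassword("examplesecretkey");   // placeholder secret key
        AWSCredentialsProvider staticProvider = credentialsFactory.create(authenticationInfo);

        System.out.println(chainProvider.getClass().getSimpleName());
        System.out.println(staticProvider.getClass().getSimpleName());
    }
}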
--------------------------------------------------------------------------------
/S3StorageWagon/deploy/s3_repository_bucket.tf:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | variable "bucket_name" {
18 | }
19 |
20 | resource "aws_s3_bucket" "cloud_storage_maven_repo" {
21 | bucket = "${var.bucket_name}"
22 | acl = "private"
23 |
24 | tags = {
25 | CloudStorageMaven = ""
26 | }
27 | }
28 |
29 | resource "aws_iam_policy" "cloud_storage_maven_repo_policy" {
30 | name = "${var.bucket_name}_policy"
31 | path = "/"
32 | description = "Cloud Storage Maven Repository Bucket Policy"
33 |
34 | policy = <<POLICY
30 |
31 | private AmazonS3 amazonS3;
32 | private String prefix;
33 | private String bucket;
34 |
35 | private ObjectListing tempListing = null;
36 | private List<S3ObjectSummary> currentKeys = new ArrayList<>();
37 |
38 | public PrefixKeysIterator(AmazonS3 amazonS3, String bucket, String prefix) {
39 | this.amazonS3 = amazonS3;
40 | this.bucket = bucket;
41 | this.prefix = prefix;
42 | }
43 |
44 | @Override
45 | public void remove() {
46 | throw new UnsupportedOperationException();
47 | }
48 |
49 | @Override public void forEachRemaining(Consumer<? super String> action) {
50 | throw new UnsupportedOperationException();
51 | }
52 |
53 | @Override
54 | public boolean hasNext() {
55 | if(currentKeys.size()>0) {
56 | return true;
57 | }
58 |
59 | fetchKeysIfExist();
60 | return currentKeys.size()>0;
61 | }
62 |
63 | private void fetchKeysIfExist() {
64 | if(tempListing==null) {
65 | tempListing = getObjectListing();
66 | currentKeys.addAll(tempListing.getObjectSummaries());
67 | } else {
68 | if(tempListing.isTruncated()) {
69 | tempListing = amazonS3.listNextBatchOfObjects(tempListing);
70 | currentKeys.addAll(tempListing.getObjectSummaries());
71 | }
72 | }
73 | }
74 |
75 | private ObjectListing getObjectListing() {
76 |
77 | return amazonS3.listObjects(new ListObjectsRequest()
78 | .withBucketName(bucket)
79 | .withPrefix(prefix));
80 | }
81 |
82 | @Override
83 | public String next() {
84 | if(!hasNext()) {
85 | return null;
86 | }
87 |
88 | return currentKeys.remove(0).getKey();
89 | }
90 |
91 | }
92 |
--------------------------------------------------------------------------------
/AzureStorageWagon/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 |     <parent>
4 |         <artifactId>cloud-storage-maven</artifactId>
5 |         <groupId>com.gkatzioura.maven.cloud</groupId>
6 |         <version>2.3</version>
7 |     </parent>
8 |     <modelVersion>4.0.0</modelVersion>
9 |
10 |     <artifactId>azure-storage-wagon</artifactId>
11 |     <packaging>maven-plugin</packaging>
12 |
13 |     <name>Azure blob storage wagon</name>
14 |
15 |     <properties>
16 |         <azure-storage.version>7.0.0</azure-storage.version>
17 |     </properties>
18 |
19 |     <licenses>
20 |         <license>
21 |             <name>Apache License, Version 2.0</name>
22 |             <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
23 |             <distribution>repo</distribution>
24 |         </license>
25 |     </licenses>
26 |
27 |     <dependencies>
28 |         <dependency>
29 |             <groupId>org.apache.maven.wagon</groupId>
30 |             <artifactId>wagon-provider-api</artifactId>
31 |             <version>${wagon.version}</version>
32 |             <scope>provided</scope>
33 |         </dependency>
34 |         <dependency>
35 |             <groupId>org.apache.maven</groupId>
36 |             <artifactId>maven-plugin-api</artifactId>
37 |             <version>${maven.plugin.api.version}</version>
38 |             <scope>provided</scope>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>com.microsoft.azure</groupId>
42 |             <artifactId>azure-storage</artifactId>
43 |             <version>${azure-storage.version}</version>
44 |         </dependency>
45 |         <dependency>
46 |             <groupId>com.gkatzioura.maven.cloud</groupId>
47 |             <artifactId>cloud-storage-core</artifactId>
48 |             <version>${project.parent.version}</version>
49 |         </dependency>
50 |         <dependency>
51 |             <groupId>commons-io</groupId>
52 |             <artifactId>commons-io</artifactId>
53 |             <version>${commons-io.version}</version>
54 |         </dependency>
55 |         <dependency>
56 |             <groupId>org.apache.maven.plugin-tools</groupId>
57 |             <artifactId>maven-plugin-annotations</artifactId>
58 |             <version>${maven.plugin.annotations.version}</version>
59 |             <scope>provided</scope>
60 |         </dependency>
61 |         <dependency>
62 |             <groupId>junit</groupId>
63 |             <artifactId>junit</artifactId>
64 |             <version>${junit.version}</version>
65 |             <scope>test</scope>
66 |         </dependency>
67 |     </dependencies>
68 |
69 |     <build>
70 |         <plugins>
71 |             <plugin>
72 |                 <groupId>org.apache.maven.plugins</groupId>
73 |                 <artifactId>maven-plugin-plugin</artifactId>
74 |                 <version>3.3</version>
75 |                 <executions>
76 |                     <execution>
77 |                         <id>default-descriptor</id>
78 |                         <phase>process-classes</phase>
79 |                     </execution>
80 |                 </executions>
81 |             </plugin>
82 |         </plugins>
83 |     </build>
84 |
85 | </project>
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/transfer/TransferProgressFileInputStream.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.transfer;
18 |
19 | import java.io.File;
20 | import java.io.FileInputStream;
21 | import java.io.FileNotFoundException;
22 | import java.io.IOException;
23 |
24 | public final class TransferProgressFileInputStream extends FileInputStream {
25 |
26 | private final TransferProgress transferProgress;
27 | private long byteLeft;
28 |
29 |
30 |
31 |
32 | public TransferProgressFileInputStream(File file, TransferProgress transferProgress) throws IOException{
33 | super(file);
34 | this.transferProgress = transferProgress;
35 | resetByteLeft();
36 | }
37 |
38 | private void resetByteLeft() throws IOException {
39 | byteLeft = this.getChannel().size();
40 | }
41 |
42 | @Override
43 | public synchronized void reset() throws IOException {
44 | super.reset();
45 | resetByteLeft();
46 | }
47 |
48 | @Override
49 | public int read() throws IOException {
50 | int b = super.read();
51 | if(b != -1){
52 | this.transferProgress.progress(new byte[]{(byte) b}, 1);
53 | byteLeft--;
54 | }//else we try to read but it was the end of the stream so nothing to report
55 | return b;
56 | }
57 |
58 | @Override
59 | public int read(byte b[]) throws IOException {
60 | int count = super.read(b);
61 | if (count != -1) {
62 | this.transferProgress.progress(b, b.length);
63 | byteLeft -= b.length;
64 | }else{//end of the stream
65 | this.transferProgress.progress(b, Math.toIntExact(byteLeft));
66 | }
67 | return count;
68 | }
69 |
70 | @Override
71 | public int read(byte b[], int off, int len) throws IOException {
72 | int count = super.read(b, off, len);
73 | if (off == 0) {
74 | if (count != -1) {
75 | this.transferProgress.progress(b, count);
76 | byteLeft -= count;
77 | }else{//end of the stream
78 | this.transferProgress.progress(b, Math.toIntExact(byteLeft));
79 | }
80 | } else {
81 | if (count != -1) {
82 | byte[] bytes = new byte[count];
83 | System.arraycopy(b, off, bytes, 0, count);
84 | this.transferProgress.progress(bytes, len);
85 | byteLeft -= count;
86 | }else{//end of the stream
87 | byte[] bytes = new byte[Math.toIntExact(byteLeft)];
88 | System.arraycopy(b, off, bytes, 0, Math.toIntExact(byteLeft));
89 | this.transferProgress.progress(b, Math.toIntExact(byteLeft));
90 | }
91 | }
92 | return count;
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/listener/TransferListenerContainerImpl.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.listener;
18 |
19 | import java.io.File;
20 | import java.util.Vector;
21 |
22 | import org.apache.maven.wagon.Wagon;
23 | import org.apache.maven.wagon.events.TransferEvent;
24 | import org.apache.maven.wagon.events.TransferListener;
25 | import org.apache.maven.wagon.resource.Resource;
26 |
27 | public class TransferListenerContainerImpl implements TransferListenerContainer {
28 |
29 | private final Wagon wagon;
30 | private final Vector<TransferListener> transferListeners;
31 |
32 | public TransferListenerContainerImpl(Wagon wagon) {
33 | this.wagon = wagon;
34 | this.transferListeners = new Vector<>();
35 | }
36 |
37 | @Override
38 | public void addTransferListener(TransferListener transferListener) {
39 | if(transferListener==null) {
40 | throw new NullPointerException();
41 | }
42 | if(!transferListeners.contains(transferListener)) {
43 | transferListeners.add(transferListener);
44 | }
45 | }
46 |
47 | @Override
48 | public void removeTransferListener(TransferListener transferListener) {
49 | transferListeners.remove(transferListener);
50 | }
51 |
52 | @Override
53 | public boolean hasTransferListener(TransferListener transferListener) {
54 | return transferListeners.contains(transferListener);
55 | }
56 |
57 | @Override
58 | public void fireTransferInitiated(Resource resource, int requestType) {
59 | TransferEvent transferEvent = new TransferEvent(this.wagon,resource,TransferEvent.TRANSFER_INITIATED,requestType);
60 | transferListeners.forEach(tl->tl.transferInitiated(transferEvent));
61 | }
62 |
63 | @Override
64 | public void fireTransferStarted(Resource resource, int requestType, File localFile) {
65 | resource.setContentLength(localFile.length());
66 | resource.setLastModified(localFile.lastModified());
67 | TransferEvent transferEvent = new TransferEvent(this.wagon,resource,TransferEvent.TRANSFER_STARTED,requestType);
68 | transferEvent.setLocalFile(localFile);
69 | transferListeners.forEach(tl->tl.transferStarted(transferEvent));
70 | }
71 |
72 | @Override
73 | public void fireTransferProgress(Resource resource, int requestType, byte[] buffer, int length) {
74 | TransferEvent transferEvent = new TransferEvent(this.wagon, resource, TransferEvent.TRANSFER_PROGRESS, requestType);
75 | transferListeners.forEach(tl->tl.transferProgress(transferEvent,buffer,length));
76 | }
77 |
78 | @Override public void fireTransferCompleted(Resource resource, int requestType) {
79 | TransferEvent transferEvent = new TransferEvent(this.wagon, resource, TransferEvent.TRANSFER_COMPLETED, requestType);
80 | transferListeners.forEach(tl->tl.transferCompleted(transferEvent));
81 | }
82 |
83 | @Override public void fireTransferError(Resource resource, int requestType, Exception exception) {
84 | TransferEvent transferEvent = new TransferEvent(this.wagon, resource, exception, requestType);
85 | transferListeners.forEach(tl->tl.transferError(transferEvent));
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/listener/SessionListenerContainer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.listener;
18 |
19 | import org.apache.maven.wagon.events.SessionListener;
20 |
21 | public interface SessionListenerContainer {
22 |
23 | /**
24 | * Add a {@link SessionListener} to be notified
25 | *
26 | * @param sessionListener The {@link SessionListener} to be notified
27 | */
28 | void addSessionListener(SessionListener sessionListener);
29 |
30 | /**
31 | * Remove a {@link SessionListener} so that it is no longer notified
32 | *
33 | * @param sessionListener The {@link SessionListener} that should no longer be notified
34 | */
35 | void removeSessionListener(SessionListener sessionListener);
36 |
37 | /**
38 | * Returns whether a {@link SessionListener} is already in the collection of {@link SessionListener}s to be notified
39 | *
40 | * @param sessionListener The {@link SessionListener} to look for
41 | * @return {@code true} if the {@link SessionListener} is already in the collection of {@link SessionListener}s to
42 | * be notified, otherwise {@code false}
43 | */
44 | boolean hasSessionListener(SessionListener sessionListener);
45 |
46 | /**
47 | * Notify {@link SessionListener}s that a session is being opened
48 | *
49 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_OPENING
50 | */
51 | void fireSessionOpening();
52 |
53 | /**
54 | * Notify {@link SessionListener}s that a session has been opened successfully
55 | *
56 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_OPENED
57 | */
58 | void fireSessionOpened();
59 |
60 | /**
61 | * Notify {@link SessionListener}s that a session is being disconnected
62 | *
63 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_DISCONNECTING
64 | */
65 | void fireSessionDisconnecting();
66 |
67 | /**
68 | * Notify {@link SessionListener}s that a session has been disconnected successfully
69 | *
70 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_DISCONNECTED
71 | */
72 | void fireSessionDisconnected();
73 |
74 | /**
75 | * Notify {@link SessionListener}s that creation of the session's connection was refused
76 | *
77 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_CONNECTION_REFUSED
78 | */
79 | void fireSessionConnectionRefused();
80 |
81 | /**
82 | * Notify {@link SessionListener}s that the session was logged in successfully
83 | *
84 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_LOGGED_IN
85 | */
86 | void fireSessionLoggedIn();
87 |
88 | /**
89 | * Notify {@link SessionListener}s that the session was logged off successfully
90 | *
91 | * @see org.apache.maven.wagon.events.SessionEvent#SESSION_LOGGED_OFF
92 | */
93 | void fireSessionLoggedOff();
94 |
95 | /**
96 | * Notify {@link SessionListener}s that an error occurred during while the session was in use
97 | *
98 | * @param exception The error that occurred
99 | */
100 | void fireSessionError(Exception exception);
101 |
102 |
103 |
104 | }
105 |
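To make the notification contract above concrete, a minimal sketch using the SessionListenerContainerImpl listed next; the wagon and listener instances are assumed to come from the surrounding Maven machinery.

import org.apache.maven.wagon.Wagon;
import org.apache.maven.wagon.events.SessionListener;

import com.gkatzioura.maven.cloud.listener.SessionListenerContainer;
import com.gkatzioura.maven.cloud.listener.SessionListenerContainerImpl;

public class SessionListenerContainerSketch {

    // Typical call order around a wagon connect/disconnect cycle.
    static void notifySessionLifecycle(Wagon wagon, SessionListener listener) {
        SessionListenerContainer container = new SessionListenerContainerImpl(wagon);
        container.addSessionListener(listener);

        container.fireSessionOpening();
        container.fireSessionOpened();
        // ... repository interaction happens here ...
        container.fireSessionDisconnecting();
        container.fireSessionDisconnected();
    }
}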
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/listener/SessionListenerContainerImpl.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.listener;
18 |
19 | import java.util.Vector;
20 |
21 | import org.apache.maven.wagon.Wagon;
22 | import org.apache.maven.wagon.events.SessionEvent;
23 | import org.apache.maven.wagon.events.SessionListener;
24 |
25 | public class SessionListenerContainerImpl implements SessionListenerContainer {
26 |
27 | private final Wagon wagon;
28 | private final Vector<SessionListener> sessionListeners;
29 |
30 | public SessionListenerContainerImpl(Wagon wagon) {
31 | this.wagon = wagon;
32 | sessionListeners = new Vector<>();
33 | }
34 |
35 | @Override
36 | public void addSessionListener(SessionListener sessionListener) {
37 | if(sessionListener==null) {
38 | throw new NullPointerException();
39 | }
40 | if(!sessionListeners.contains(sessionListener)) {
41 | sessionListeners.add(sessionListener);
42 | }
43 | }
44 |
45 | @Override
46 | public void removeSessionListener(SessionListener sessionListener) {
47 | sessionListeners.remove(sessionListener);
48 | }
49 |
50 | @Override
51 | public boolean hasSessionListener(SessionListener sessionListener) {
52 | return sessionListeners.contains(sessionListener);
53 | }
54 |
55 | @Override
56 | public void fireSessionOpening() {
57 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_OPENING);
58 | sessionListeners.forEach(e->e.sessionOpening(sessionEvent));
59 | }
60 |
61 | @Override
62 | public void fireSessionOpened() {
63 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_OPENED);
64 | sessionListeners.forEach(e->e.sessionOpened(sessionEvent));
65 | }
66 |
67 | @Override
68 | public void fireSessionDisconnecting() {
69 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_DISCONNECTING);
70 | sessionListeners.forEach(e->e.sessionDisconnecting(sessionEvent));
71 | }
72 |
73 | @Override
74 | public void fireSessionDisconnected() {
75 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_DISCONNECTED);
76 | sessionListeners.forEach(se->se.sessionDisconnected(sessionEvent));
77 | }
78 |
79 | @Override
80 | public void fireSessionConnectionRefused() {
81 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_CONNECTION_REFUSED);
82 | sessionListeners.forEach(se->se.sessionConnectionRefused(sessionEvent));
83 | }
84 |
85 | @Override
86 | public void fireSessionLoggedIn() {
87 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_LOGGED_IN);
88 | sessionListeners.forEach(se->se.sessionLoggedIn(sessionEvent));
89 | }
90 |
91 | @Override
92 | public void fireSessionLoggedOff() {
93 | SessionEvent sessionEvent = new SessionEvent(this.wagon, SessionEvent.SESSION_LOGGED_OFF);
94 | sessionListeners.forEach(se->se.sessionLoggedOff(sessionEvent));
95 | }
96 |
97 | @Override
98 | public void fireSessionError(Exception exception) {
99 | SessionEvent sessionEvent = new SessionEvent(this.wagon, exception);
100 | sessionListeners.forEach(se->se.sessionError(sessionEvent));
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/listener/TransferListenerContainer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.listener;
18 |
19 | import org.apache.maven.wagon.events.TransferListener;
20 | import org.apache.maven.wagon.resource.Resource;
21 |
22 | import java.io.File;
23 |
24 | public interface TransferListenerContainer {
25 |
26 | /**
27 | * Add a {@link TransferListener} to be notified
28 | *
29 | * @param transferListener The {@link TransferListener} to be notified
30 | */
31 | void addTransferListener(TransferListener transferListener);
32 |
33 | /**
34 | * Remove a {@link TransferListener} so that it is no longer notified
35 | *
36 | * @param transferListener The {@link TransferListener} that should no longer be notified
37 | */
38 | void removeTransferListener(TransferListener transferListener);
39 |
40 | /**
41 | * Returns whether a {@link TransferListener} is already in the collection of {@link TransferListener}s to be
42 | * notified
43 | *
44 | * @param transferListener the {@link TransferListener} to look for
45 | * @return {@code true} if the {@link TransferListener} is already in the collection of {@link TransferListener}s to
46 | * be notified, otherwise {@code false}
47 | */
48 | boolean hasTransferListener(TransferListener transferListener);
49 |
50 | /**
51 | * Notify {@link TransferListener}s that a transfer is being initiated
52 | *
53 | * @param resource The resource being transfered
54 | * @param requestType The type of request to be executed
55 | * @see org.apache.maven.wagon.events.TransferEvent#TRANSFER_INITIATED
56 | */
57 | void fireTransferInitiated(Resource resource, int requestType);
58 |
59 | /**
60 | * Notify {@link TransferListener}s that a transfer has started successfully
61 | *
62 | * @param resource The resource being transfered
63 | * @param requestType The type of request being executed
64 | * @param localFile local file used
65 | * @see org.apache.maven.wagon.events.TransferEvent#TRANSFER_STARTED
66 | */
67 | void fireTransferStarted(Resource resource, int requestType, File localFile);
68 |
69 | /**
70 | * Notify {@link TransferListener}s about the progress of a transfer
71 | *
72 | * @param resource The resource being transfered
73 | * @param requestType The type of request being executed
74 | * @param buffer The buffer of bytes being transfered
75 | * @param length The length of the data in the buffer
76 | * @see org.apache.maven.wagon.events.TransferEvent#TRANSFER_PROGRESS
77 | */
78 | void fireTransferProgress(Resource resource, int requestType, byte[] buffer, int length);
79 |
80 | /**
81 | * Notify {@link TransferListener}s that the transfer was completed successfully
82 | *
83 | * @param resource The resource being transfered
84 | * @param requestType The type of request executed
85 | * @see org.apache.maven.wagon.events.TransferEvent#TRANSFER_COMPLETED
86 | */
87 | void fireTransferCompleted(Resource resource, int requestType);
88 |
89 | /**
90 | * Notify {@link TransferListener}s that an error occurred during the transfer
91 | *
92 | * @param resource The resource being transfered
93 | * @param requestType The type of the request being executed
94 | * @param exception The error that occurred
95 | */
96 | void fireTransferError(Resource resource, int requestType, Exception exception);
97 |
98 | }
99 |
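A minimal sketch of the event sequence these methods are meant to produce, using the TransferListenerContainerImpl listed earlier; the wagon, local file and buffer are assumed to be supplied by the caller.

import java.io.File;

import org.apache.maven.wagon.Wagon;
import org.apache.maven.wagon.events.TransferEvent;
import org.apache.maven.wagon.resource.Resource;

import com.gkatzioura.maven.cloud.listener.TransferListenerContainer;
import com.gkatzioura.maven.cloud.listener.TransferListenerContainerImpl;

public class TransferListenerContainerSketch {

    // Typical event order for an upload: initiated, started, progress..., completed.
    static void notifyUpload(Wagon wagon, File localFile, byte[] chunk) {
        TransferListenerContainer container = new TransferListenerContainerImpl(wagon);
        Resource resource = new Resource("com/example/artifact/1.0/artifact-1.0.jar");

        container.fireTransferInitiated(resource, TransferEvent.REQUEST_PUT);
        container.fireTransferStarted(resource, TransferEvent.REQUEST_PUT, localFile);
        container.fireTransferProgress(resource, TransferEvent.REQUEST_PUT, chunk, chunk.length);
        container.fireTransferCompleted(resource, TransferEvent.REQUEST_PUT);
    }
}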
--------------------------------------------------------------------------------
/S3StorageWagon/src/test/java/com/gkatzioura/maven/cloud/s3/S3StorageWagonTest.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.s3;
2 |
3 | import com.amazonaws.services.s3.AmazonS3;
4 | import com.amazonaws.services.s3.model.AmazonS3Exception;
5 | import com.amazonaws.services.s3.model.ObjectListing;
6 | import com.amazonaws.services.s3.model.S3ObjectSummary;
7 | import com.gkatzioura.maven.cloud.s3.utils.S3Connect;
8 | import org.apache.maven.wagon.WagonTestCase;
9 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
10 | import org.apache.maven.wagon.repository.Repository;
11 | import org.apache.maven.wagon.resource.Resource;
12 |
13 | import java.io.IOException;
14 | import java.util.Iterator;
15 |
16 | /**
17 | * This class runs the 14 unit tests provided by the Apache Wagon provider test suite.
18 | */
19 | public class S3StorageWagonTest extends WagonTestCase {
20 |
21 | AmazonS3 amazonS3;
22 |
23 | @Override
24 | protected AuthenticationInfo getAuthInfo() {
25 | return null; //this will make the AWS SDK use its default credentials provider chain
26 | }
27 |
28 | @Override
29 | protected String getTestRepositoryUrl() throws IOException {
30 | return "s3://"+getTestBucket()+"/foo";
31 | }
32 |
33 | @Override
34 | protected String getProtocol() {
35 | return "s3";
36 | }
37 |
38 | @Override
39 | protected int getTestRepositoryPort() {
40 | return 0;
41 | }
42 |
43 | //TODO set it to true and fix the failing tests
44 | protected boolean supportsGetIfNewer()
45 | {
46 | return false;
47 | }
48 |
49 | protected long getExpectedLastModifiedOnGet(Repository repository, Resource resource )
50 | {
51 | return 0;
52 | }
53 |
54 | @Override
55 | protected void setUp() throws Exception {
56 | super.setUp();
57 | //creates the bucket
58 | amazonS3 = S3Connect.connect(getAuthInfo(), null, new EndpointProperty(null), new PathStyleEnabledProperty(null));
59 |
60 | createBucket();
61 | }
62 |
63 |
64 | @Override
65 | protected void tearDown() throws Exception {
66 | super.tearDown();
67 | emptyBucket();
68 | }
69 |
70 | private String getTestBucket() {
71 | return System.getProperty("s3.test.bucket.name", "s3wagontestbucket");
72 | }
73 |
74 |
75 |
76 | /**
77 | * creates the bucket, this is only used for testing purposes hence it is package protected
78 | */
79 | void createBucket(){
80 | if (!bucketExists(getTestBucket())) {
81 | amazonS3.createBucket(getTestBucket());
82 | }
83 | }
84 |
85 | boolean bucketExists(String bucket){
86 | try {
87 | amazonS3.getObjectMetadata(bucket, "");
88 | return true;
89 | } catch (AmazonS3Exception e) {
90 | return false;
91 | }
92 | }
93 |
94 |
95 | /**
96 | * delete the bucket, this is only used for testing hence it is package protected
97 | */
98 | void deleteBucket() {
99 | emptyBucket();
100 | amazonS3.deleteBucket(getTestBucket());
101 | }
102 | /**
103 | * empty the bucket, this is only used for testing hence it is package protected
104 | */
105 | void emptyBucket() {
106 | ObjectListing objectListing = amazonS3.listObjects(getTestBucket());
107 | while (true) {
108 | Iterator<S3ObjectSummary> objIter = objectListing.getObjectSummaries().iterator();
109 | while (objIter.hasNext()) {
110 | amazonS3.deleteObject(getTestBucket(), objIter.next().getKey());
111 | }
112 |
113 | // If the bucket contains many objects, the listObjects() call
114 | // might not return all of the objects in the first listing. Check to
115 | // see whether the listing was truncated. If so, retrieve the next page of objects
116 | // and delete them.
117 | if (objectListing.isTruncated()) {
118 | objectListing = amazonS3.listNextBatchOfObjects(objectListing);
119 | } else {
120 | break;
121 | }
122 | }
123 | }
124 |
125 |
126 | }
127 |
--------------------------------------------------------------------------------
/GoogleStorageWagon/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 |     <parent>
4 |         <artifactId>cloud-storage-maven</artifactId>
5 |         <groupId>com.gkatzioura.maven.cloud</groupId>
6 |         <version>2.3</version>
7 |     </parent>
8 |     <modelVersion>4.0.0</modelVersion>
9 |
10 |     <artifactId>google-storage-wagon</artifactId>
11 |     <packaging>maven-plugin</packaging>
12 |     <name>Google Cloud storage wagon</name>
13 |
14 |     <properties>
15 |         <gcs.version>1.83.0</gcs.version>
16 |     </properties>
17 |
18 |     <licenses>
19 |         <license>
20 |             <name>Apache License, Version 2.0</name>
21 |             <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
22 |             <distribution>repo</distribution>
23 |         </license>
24 |     </licenses>
25 |     <dependencies>
26 |         <dependency>
27 |             <groupId>org.apache.maven.wagon</groupId>
28 |             <artifactId>wagon-provider-api</artifactId>
29 |             <version>${wagon.version}</version>
30 |             <scope>provided</scope>
31 |         </dependency>
32 |         <dependency>
33 |             <groupId>com.google.cloud</groupId>
34 |             <artifactId>google-cloud-storage</artifactId>
35 |             <version>${gcs.version}</version>
36 |         </dependency>
37 |         <dependency>
38 |             <groupId>com.gkatzioura.maven.cloud</groupId>
39 |             <artifactId>cloud-storage-core</artifactId>
40 |             <version>${project.parent.version}</version>
41 |         </dependency>
42 |         <dependency>
43 |             <groupId>commons-io</groupId>
44 |             <artifactId>commons-io</artifactId>
45 |             <version>${commons-io.version}</version>
46 |         </dependency>
47 |         <dependency>
48 |             <groupId>org.apache.maven</groupId>
49 |             <artifactId>maven-plugin-api</artifactId>
50 |             <version>${maven.plugin.api.version}</version>
51 |             <scope>provided</scope>
52 |         </dependency>
53 |         <dependency>
54 |             <groupId>org.apache.maven.plugin-tools</groupId>
55 |             <artifactId>maven-plugin-annotations</artifactId>
56 |             <version>${maven.plugin.annotations.version}</version>
57 |             <scope>provided</scope>
58 |         </dependency>
59 |         <dependency>
60 |             <groupId>junit</groupId>
61 |             <artifactId>junit</artifactId>
62 |             <version>${junit.version}</version>
63 |             <scope>test</scope>
64 |         </dependency>
65 |     </dependencies>
66 |
67 |     <build>
68 |         <plugins>
69 |             <plugin>
70 |                 <groupId>org.apache.maven.plugins</groupId>
71 |                 <artifactId>maven-plugin-plugin</artifactId>
72 |                 <version>3.3</version>
73 |                 <executions>
74 |                     <execution>
75 |                         <id>default-descriptor</id>
76 |                         <phase>process-classes</phase>
77 |                     </execution>
78 |                 </executions>
79 |             </plugin>
80 |             <plugin>
81 |                 <groupId>org.apache.maven.plugins</groupId>
82 |                 <artifactId>maven-shade-plugin</artifactId>
83 |                 <version>3.1.1</version>
84 |                 <executions>
85 |                     <execution>
86 |                         <phase>package</phase>
87 |                         <goals>
88 |                             <goal>shade</goal>
89 |                         </goals>
90 |                         <configuration>
91 |                             <relocations>
92 |                                 <relocation>
93 |                                     <pattern>com.google</pattern>
94 |                                     <shadedPattern>com.vorstella.shade.com.google</shadedPattern>
95 |                                 </relocation>
96 |                                 <relocation>
97 |                                     <pattern>com.fasterxml.jackson</pattern>
98 |                                     <shadedPattern>com.vorstella.shade.com.fasterxml.jackson</shadedPattern>
99 |                                 </relocation>
100 |                             </relocations>
101 |                         </configuration>
102 |                     </execution>
103 |                 </executions>
104 |             </plugin>
105 |         </plugins>
106 |     </build>
107 |
108 | </project>
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/utils/S3Connect.java:
--------------------------------------------------------------------------------
1 | /*
2 | * The copyright of this file belongs to Feedzai. The file cannot be
3 | * reproduced in whole or in part, stored in a retrieval system,
4 | * transmitted in any form, or by any means electronic, mechanical,
5 | * photocopying, or otherwise, without the prior permission of the owner.
6 | *
7 | * © 2019 Feedzai, Strictly Confidential
8 | */
9 | package com.gkatzioura.maven.cloud.s3.utils;
10 |
11 | import com.amazonaws.SdkClientException;
12 | import com.amazonaws.client.builder.AwsClientBuilder;
13 | import com.amazonaws.services.s3.AmazonS3;
14 | import com.amazonaws.services.s3.AmazonS3ClientBuilder;
15 | import com.gkatzioura.maven.cloud.s3.CredentialsFactory;
16 | import com.gkatzioura.maven.cloud.s3.EndpointProperty;
17 | import com.gkatzioura.maven.cloud.s3.PathStyleEnabledProperty;
18 | import com.gkatzioura.maven.cloud.s3.S3StorageRegionProviderChain;
19 | import org.apache.maven.wagon.authentication.AuthenticationException;
20 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
21 |
22 | import java.util.logging.Logger;
23 |
24 | /**
25 | * Utility methods used to connect to Amazon's S3 API.
26 | */
27 | public class S3Connect {
28 |
29 | /**
30 | * A logger for this class.
31 | */
32 | private static final Logger LOGGER = Logger.getLogger(S3Connect.class.getName());
33 |
34 | /**
35 | * Connects to the AWS API. The provided authentication, region, endpoint and path-style are all taken into account
36 | * to create the returned {@link AmazonS3} instance.
37 | *
38 | * @param authenticationInfo When {@code authenticationInfo} is passed as {@code null}, an authentication provider
39 | * that gets the credentials from environment properties, system environment variables or
40 | * other global locations will be used. See the documentation for the
41 | * DefaultAWSCredentialsProviderChain
42 | * class for details.
43 | * @param region The region where the bucket was created in.
44 | * @param endpoint The endpoint/bucket to connect to.
45 | * @param pathStyle A {@link PathStyleEnabledProperty} indicating whether the endpoint/bucket configuration being
46 | * passed is in a path-style configuration. See
47 | * Accessing a Bucket in the S3 documentation.
48 | * @return An instance of {@link AmazonS3} that can be used to send and receive data to the intended endpoint/bucket.
49 | * @throws AuthenticationException if the passed credentials are invalid for connecting to the intended endpoint/bucket.
50 | */
51 | public static AmazonS3 connect(AuthenticationInfo authenticationInfo, String region, EndpointProperty endpoint, PathStyleEnabledProperty pathStyle) throws AuthenticationException {
52 | AmazonS3ClientBuilder builder = null;
53 | try {
54 | builder = createAmazonS3ClientBuilder(authenticationInfo, region, endpoint, pathStyle);
55 |
56 | AmazonS3 amazonS3 = builder.build();
57 |
58 | LOGGER.finer(String.format("Connected to S3 using bucket %s.", endpoint.get()));
59 |
60 | return amazonS3;
61 | } catch (SdkClientException e) {
62 | if (builder != null){
63 | StringBuilder message = new StringBuilder();
64 | message.append("Failed to connect");
65 | if (builder.getEndpoint() != null){
66 | message.append(
67 | String.format(" to endpoint [%s] using region [%s]",
68 | builder.getEndpoint().getServiceEndpoint(),
69 | builder.getEndpoint().getSigningRegion()));
70 |
71 | } else {
72 | message.append(String.format(" using region [%s]", builder.getRegion()));
73 | }
74 | throw new AuthenticationException(message.toString(), e);
75 | }
76 | throw new AuthenticationException("Could not authenticate", e);
77 | }
78 | }
79 |
80 | private static AmazonS3ClientBuilder createAmazonS3ClientBuilder(AuthenticationInfo authenticationInfo, String region, EndpointProperty endpoint, PathStyleEnabledProperty pathStyle) {
81 | final S3StorageRegionProviderChain regionProvider = new S3StorageRegionProviderChain(region);
82 |
83 | AmazonS3ClientBuilder builder;
84 | builder = AmazonS3ClientBuilder.standard().withCredentials(new CredentialsFactory().create(authenticationInfo));
85 |
86 | if (endpoint.isPresent()){
87 | builder.setEndpointConfiguration( new AwsClientBuilder.EndpointConfiguration(endpoint.get(), builder.getRegion()));
88 | } else {
89 | builder.setRegion(regionProvider.getRegion());
90 | }
91 |
92 | builder.setPathStyleAccessEnabled(pathStyle.get());
93 | return builder;
94 | }
95 | }
96 |
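A small sketch of a connect call under the defaults described above; the region and bucket name are placeholders, and the null arguments mirror how the test elsewhere in this repository invokes it.

import org.apache.maven.wagon.authentication.AuthenticationException;

import com.amazonaws.services.s3.AmazonS3;
import com.gkatzioura.maven.cloud.s3.EndpointProperty;
import com.gkatzioura.maven.cloud.s3.PathStyleEnabledProperty;
import com.gkatzioura.maven.cloud.s3.utils.S3Connect;

public class S3ConnectExample {

    public static void main(String[] args) throws AuthenticationException {
        // Null authentication info defers to the default AWS credentials provider chain;
        // null endpoint and path-style values keep the SDK defaults for the resolved region.
        AmazonS3 amazonS3 = S3Connect.connect(
                null,
                "eu-west-1",                            // placeholder region
                new EndpointProperty(null),
                new PathStyleEnabledProperty(null));

        // placeholder bucket name
        System.out.println(amazonS3.listObjects("my-maven-bucket").getObjectSummaries().size());
    }
}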
--------------------------------------------------------------------------------
/GoogleStorageWagon/src/main/java/com/gkatzioura/maven/cloud/gcs/plugin/download/GCSDownloadMojo.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.gcs.plugin.download;
2 |
3 | import java.io.File;
4 | import java.io.IOException;
5 | import java.util.Iterator;
6 | import java.util.List;
7 | import java.util.logging.Level;
8 | import java.util.logging.Logger;
9 | import java.util.stream.Collectors;
10 |
11 | import org.apache.maven.plugin.AbstractMojo;
12 | import org.apache.maven.plugin.MojoExecutionException;
13 | import org.apache.maven.plugin.MojoFailureException;
14 | import org.apache.maven.plugins.annotations.Mojo;
15 | import org.apache.maven.plugins.annotations.Parameter;
16 |
17 | import com.gkatzioura.maven.cloud.KeyIteratorConcated;
18 | import com.gkatzioura.maven.cloud.gcs.StorageFactory;
19 | import com.gkatzioura.maven.cloud.gcs.plugin.PrefixKeysIterator;
20 | import com.google.cloud.storage.Blob;
21 | import com.google.cloud.storage.BlobId;
22 | import com.google.cloud.storage.Storage;
23 |
24 | @Mojo(name = "gcs-download")
25 | public class GCSDownloadMojo extends AbstractMojo {
26 |
27 | @Parameter(property = "gcs-download.bucket")
28 | private String bucket;
29 |
30 | @Parameter(property = "gcs-download.keys")
31 | private List<String> keys;
32 |
33 | @Parameter(property = "gcs-download.downloadPath")
34 | private String downloadPath;
35 |
36 | @Parameter(property = "gcs-download.keyPath")
37 | private String keyPath;
38 |
39 | private final StorageFactory storageFactory = new StorageFactory();
40 | private Storage storage;
41 |
42 | private static final Logger LOGGER = Logger.getLogger(GCSDownloadMojo.class.getName());
43 |
44 | public GCSDownloadMojo() {
45 | }
46 |
47 | public GCSDownloadMojo(String bucket, List<String> keys, String downloadPath) throws MojoExecutionException, MojoFailureException {
48 | this.bucket = bucket;
49 | this.keys = keys;
50 | this.downloadPath = downloadPath;
51 | }
52 |
53 | @Override
54 | public void execute() throws MojoExecutionException, MojoFailureException {
55 | storage = initializeStorage();
56 |
57 | if (keys.size()==1) {
58 | downloadSingleFile(storage,keys.get(0));
59 | return;
60 | }
61 |
62 | List<Iterator<Blob>> prefixKeyIterators = keys.stream()
63 | .map(pi -> new PrefixKeysIterator(storage,bucket,pi))
64 | .collect(Collectors.toList());
65 |
66 | Iterator<Blob> keyIteratorConcated = new KeyIteratorConcated<>(prefixKeyIterators);
67 |
68 | while (keyIteratorConcated.hasNext()) {
69 | Blob blob = keyIteratorConcated.next();
70 | LOGGER.info("Scheduling blob for download "+blob.getBucket()+" "+blob.getName());
71 | downloadFile(blob);
72 | }
73 | }
74 |
75 | private Storage initializeStorage() throws MojoExecutionException {
76 | if(keyPath==null) {
77 | return storageFactory.createDefault();
78 | } else {
79 | try {
80 | return storageFactory.createWithKeyFile(keyPath);
81 | } catch (IOException e) {
82 | throw new MojoExecutionException("Failed to set Authentication to Google Cloud");
83 | }
84 | }
85 | }
86 |
87 | private void downloadSingleFile(Storage storage,String key) {
88 | File file = new File(downloadPath);
89 |
90 | if(file.getParentFile()!=null) {
91 | file.getParentFile().mkdirs();
92 | }
93 |
94 | Blob blob = storage.get(BlobId.of(bucket, key));
95 | blob.downloadTo(file.toPath());
96 | }
97 |
98 | private void downloadFile(Blob blob) {
99 | LOGGER.log(Level.INFO, "Downloading from bucket " + blob.getBucket() + " with key " + blob.getName());
100 | File file = new File(createFullFilePath(blob.getName()));
101 |
102 | if(file.getParent()!=null) {
103 | file.getParentFile().mkdirs();
104 | }
105 |
106 | if(isDirectory(blob)) {
107 | LOGGER.log(Level.INFO,"Bucket "+blob.getBucket()+" key "+blob.getName()+" is as directory");
108 | return;
109 | }
110 |
111 | LOGGER.info("Downloading file "+blob.getBucket()+" key "+blob.getName()+" to path "+file.toPath());
112 |
113 |
114 | LOGGER.info("Path file "+file.isDirectory()+" "+file.isFile());
115 |
116 | blob.downloadTo(file.toPath());
117 | }
118 |
119 | private final String createFullFilePath(String key) {
120 |
121 | String fullPath = downloadPath+"/"+key;
122 | return fullPath;
123 | }
124 |
125 | /**
126 | * Because blob.isDirectory() does not behave as expected, a key is treated as a directory when more than one blob is listed under its prefix.
127 | * @param blob the blob to check
128 | * @return true if more than one blob exists under the blob's name used as a prefix
129 | */
130 | private final boolean isDirectory(Blob blob) {
131 | Iterator<Blob> blobs = storage.list(bucket,
132 | Storage.BlobListOption.prefix(blob.getName()
133 | )).getValues().iterator();
134 |
135 | if(blobs.hasNext()) {
136 | blobs.next();
137 | return blobs.hasNext();
138 | }
139 |
140 | return false;
141 | }
142 |
143 | }
144 |
--------------------------------------------------------------------------------
/S3StorageWagon/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 |     <parent>
4 |         <artifactId>cloud-storage-maven</artifactId>
5 |         <groupId>com.gkatzioura.maven.cloud</groupId>
6 |         <version>2.3</version>
7 |     </parent>
8 |     <modelVersion>4.0.0</modelVersion>
9 |
10 |     <artifactId>s3-storage-wagon</artifactId>
11 |     <packaging>maven-plugin</packaging>
12 |     <name>S3 storage wagon</name>
13 |
14 |     <properties>
15 |         <aws.version>1.11.743</aws.version>
16 |         <httpcore>4.4.10</httpcore>
17 |         <powermock.version>1.7.1</powermock.version>
18 |     </properties>
19 |
20 |     <licenses>
21 |         <license>
22 |             <name>Apache License, Version 2.0</name>
23 |             <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
24 |             <distribution>repo</distribution>
25 |         </license>
26 |     </licenses>
27 |     <dependencies>
28 |         <dependency>
29 |             <groupId>org.apache.maven.wagon</groupId>
30 |             <artifactId>wagon-provider-api</artifactId>
31 |             <version>${wagon.version}</version>
32 |             <scope>provided</scope>
33 |         </dependency>
34 |         <dependency>
35 |             <groupId>org.apache.maven</groupId>
36 |             <artifactId>maven-plugin-api</artifactId>
37 |             <version>${maven.plugin.api.version}</version>
38 |             <scope>provided</scope>
39 |         </dependency>
40 |         <dependency>
41 |             <groupId>com.amazonaws</groupId>
42 |             <artifactId>aws-java-sdk-s3</artifactId>
43 |             <version>${aws.version}</version>
44 |         </dependency>
45 |         <dependency>
46 |             <groupId>com.amazonaws</groupId>
47 |             <artifactId>aws-java-sdk-sts</artifactId>
48 |             <version>${aws.version}</version>
49 |         </dependency>
50 |         <dependency>
51 |             <groupId>com.gkatzioura.maven.cloud</groupId>
52 |             <artifactId>cloud-storage-core</artifactId>
53 |             <version>${project.parent.version}</version>
54 |         </dependency>
55 |         <dependency>
56 |             <groupId>commons-io</groupId>
57 |             <artifactId>commons-io</artifactId>
58 |             <version>${commons-io.version}</version>
59 |         </dependency>
60 |         <dependency>
61 |             <groupId>org.apache.maven.plugin-tools</groupId>
62 |             <artifactId>maven-plugin-annotations</artifactId>
63 |             <version>${maven.plugin.annotations.version}</version>
64 |             <scope>provided</scope>
65 |         </dependency>
66 |         <dependency>
67 |             <groupId>org.apache.httpcomponents</groupId>
68 |             <artifactId>httpcore</artifactId>
69 |             <version>${httpcore}</version>
70 |         </dependency>
71 |         <dependency>
72 |             <groupId>junit</groupId>
73 |             <artifactId>junit</artifactId>
74 |             <version>${junit.version}</version>
75 |             <scope>test</scope>
76 |         </dependency>
77 |         <dependency>
78 |             <groupId>org.powermock</groupId>
79 |             <artifactId>powermock-module-junit4</artifactId>
80 |             <version>${powermock.version}</version>
81 |             <scope>test</scope>
82 |         </dependency>
83 |         <dependency>
84 |             <groupId>org.powermock</groupId>
85 |             <artifactId>powermock-api-mockito2</artifactId>
86 |             <version>${powermock.version}</version>
87 |             <scope>test</scope>
88 |         </dependency>
89 |         <dependency>
90 |             <groupId>org.apache.maven.wagon</groupId>
91 |             <artifactId>wagon-provider-test</artifactId>
92 |             <version>${wagon.version}</version>
93 |             <scope>test</scope>
94 |         </dependency>
95 |     </dependencies>
96 |
97 |     <build>
98 |         <plugins>
99 |             <plugin>
100 |                 <groupId>org.apache.maven.plugins</groupId>
101 |                 <artifactId>maven-plugin-plugin</artifactId>
102 |                 <version>3.3</version>
103 |                 <executions>
104 |                     <execution>
105 |                         <id>default-descriptor</id>
106 |                         <phase>process-classes</phase>
107 |                     </execution>
108 |                 </executions>
109 |             </plugin>
110 |         </plugins>
111 |     </build>
112 |
113 |     <profiles>
114 |         <profile>
115 |             <id>no-aws-s3-tests</id>
116 |             <activation>
117 |                 <property>
118 |                     <name>!real-s3-tests</name>
119 |                 </property>
120 |             </activation>
121 |             <build>
122 |                 <plugins>
123 |                     <plugin>
124 |                         <artifactId>maven-surefire-plugin</artifactId>
125 |                         <configuration>
126 |                             <excludes>
127 |                                 <exclude>**/S3StorageWagonTest.*</exclude>
128 |                             </excludes>
129 |                         </configuration>
130 |                     </plugin>
131 |                 </plugins>
132 |             </build>
133 |         </profile>
134 |     </profiles>
135 | </project>
--------------------------------------------------------------------------------
/GoogleStorageWagon/src/main/java/com/gkatzioura/maven/cloud/gcs/plugin/upload/GCSUploadMojo.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.gcs.plugin.upload;
2 |
3 | import java.io.File;
4 | import java.io.FileInputStream;
5 | import java.io.IOException;
6 | import java.io.InputStream;
7 | import java.util.ArrayList;
8 | import java.util.List;
9 |
10 | import org.apache.commons.io.IOUtils;
11 | import org.apache.maven.plugin.AbstractMojo;
12 | import org.apache.maven.plugin.MojoExecutionException;
13 | import org.apache.maven.plugin.MojoFailureException;
14 | import org.apache.maven.plugins.annotations.Mojo;
15 | import org.apache.maven.plugins.annotations.Parameter;
16 |
17 | import com.gkatzioura.maven.cloud.gcs.StorageFactory;
18 | import com.google.cloud.storage.BlobInfo;
19 | import com.google.cloud.storage.Storage;
20 | import com.google.cloud.storage.StorageOptions;
21 |
22 | @Mojo(name = "gcs-upload")
23 | public class GCSUploadMojo extends AbstractMojo {
24 |
25 | @Parameter( property = "gcs-upload.bucket")
26 | private String bucket;
27 |
28 | @Parameter(property = "gcs-upload.path")
29 | private String path;
30 |
31 | @Parameter(property = "gcs-upload.key")
32 | private String key;
33 |
34 | @Parameter(property = "gcs-upload.keyPath")
35 | private String keyPath;
36 |
37 | private final StorageFactory storageFactory = new StorageFactory();
38 |
39 | public GCSUploadMojo() {
40 | }
41 |
42 | /**
43 | * If the path points to a file, that single file is uploaded. If it points to a directory,
44 | * the directory is walked recursively and every file found is uploaded, using the key as a prefix.
45 | * @param bucket
46 | * @param path
47 | * @param key
48 | */
49 | public GCSUploadMojo(String bucket, String path, String key) {
50 | this.bucket = bucket;
51 | this.path = path;
52 | this.key = key;
53 | }
54 |
55 | @Override
56 | public void execute() throws MojoExecutionException, MojoFailureException {
57 | if(bucket == null) {
58 | throw new MojoExecutionException("You need to specify a bucket");
59 | }
60 |
61 | Storage storage = initializeStorage();
62 |
63 | if(isDirectory()){
64 | List<String> filesToUpload = findFilesToUpload(path);
65 |
66 | for(String fileToUpload: filesToUpload) {
67 | keyUpload(storage, generateKeyName(fileToUpload), new File(fileToUpload));
68 | }
69 | } else {
70 | keyUpload(storage, keyIfNull(), new File(path));
71 | }
72 | }
73 |
74 | private Storage initializeStorage() throws MojoExecutionException {
75 | if(keyPath==null) {
76 | return storageFactory.createDefault();
77 | } else {
78 | try {
79 | return storageFactory.createWithKeyFile(keyPath);
80 | } catch (IOException e) {
81 | throw new MojoExecutionException("Failed to set Authentication to Google Cloud");
82 | }
83 | }
84 | }
85 |
86 | private List<String> findFilesToUpload(String filePath) {
87 | List<String> totalFiles = new ArrayList<>();
88 |
89 | File file = new File(filePath);
90 |
91 | if(file.isDirectory()) {
92 | File[] files = file.listFiles();
93 |
94 | for(File lFile: files) {
95 | if(lFile.isDirectory()) {
96 | List<String> filesFound = findFilesToUpload(lFile.getAbsolutePath());
97 | totalFiles.addAll(filesFound);
98 | } else {
99 | totalFiles.add(lFile.getAbsolutePath());
100 | }
101 | }
102 |
103 | } else {
104 | totalFiles.add(file.getAbsolutePath());
105 | }
106 |
107 | return totalFiles;
108 | }
109 |
110 | private void keyUpload(Storage storage, String keyName, File file) throws MojoExecutionException {
111 | BlobInfo blobInfo = BlobInfo.newBuilder(bucket, keyName).build();
112 |
113 | try (InputStream inputStream = new FileInputStream(file)) {
114 | storage.create(blobInfo,IOUtils.toByteArray(inputStream));
115 | } catch (IOException e) {
116 | throw new MojoExecutionException("Failed to upload mojo",e);
117 | }
118 | }
119 |
120 | private boolean isDirectory() {
121 | return new File(path).isDirectory();
122 | }
123 |
124 |
125 | private String generateKeyName(String fullFilePath) {
126 | StringBuilder keyNameBuilder = new StringBuilder();
127 |
128 | String absolutePath = new File(path).getAbsolutePath();
129 |
130 | if(key!=null) {
131 | keyNameBuilder.append(key);
132 | if(!fullFilePath.startsWith("/")) {
133 | keyNameBuilder.append("/");
134 | }
135 | keyNameBuilder.append(fullFilePath.replace(absolutePath,""));
136 | } else {
137 | final String clearFilePath = fullFilePath.replace(absolutePath,"");
138 | final String filePathToAppend = clearFilePath.startsWith("/")? clearFilePath.replaceFirst("/",""):clearFilePath;
139 | keyNameBuilder.append(filePathToAppend);
140 | }
141 | return keyNameBuilder.toString();
142 | }
143 |
144 |
145 | private String keyIfNull() {
146 | if(key==null) {
147 | return new File(path).getName();
148 | } else {
149 | return key;
150 | }
151 | }
152 |
153 | }
154 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 |     <modelVersion>4.0.0</modelVersion>
4 |
5 |     <groupId>com.gkatzioura.maven.cloud</groupId>
6 |     <artifactId>cloud-storage-maven</artifactId>
7 |     <packaging>pom</packaging>
8 |     <version>2.3</version>
9 |
10 |     <name>Cloud Storage</name>
11 |     <description>The CloudStorage project enables you to use the storage options of cloud providers (Google Cloud, AWS S3, Azure Blob Storage) as maven repositories.</description>
12 |     <url>https://github.com/gkatzioura/CloudStorage</url>
13 |
14 |     <licenses>
15 |         <license>
16 |             <name>Apache License, Version 2.0</name>
17 |             <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
18 |             <distribution>repo</distribution>
19 |         </license>
20 |     </licenses>
21 |
22 |     <developers>
23 |         <developer>
24 |             <name>Emmanouil Gkatziouras</name>
25 |             <email>emmanouil@gmail.com</email>
26 |             <id>egkatzioura</id>
27 |             <url>https://egkatzioura.com/</url>
28 |         </developer>
29 |     </developers>
30 |
31 |     <scm>
32 |         <url>https://github.com/gkatzioura/CloudStorage</url>
33 |         <connection>scm:git:https://github.com/gkatzioura/CloudStorage.git</connection>
34 |         <developerConnection>scm:git:https://github.com/gkatzioura/CloudStorage.git</developerConnection>
35 |         <tag>HEAD</tag>
36 |     </scm>
37 |
38 |     <properties>
39 |         <wagon.version>3.0.0</wagon.version>
40 |         <logback.version>1.2.3</logback.version><!-- property name assumed -->
41 |         <commons-io.version>2.6</commons-io.version>
42 |         <junit.version>4.12</junit.version>
43 |         <maven.plugin.api.version>3.0</maven.plugin.api.version>
44 |         <maven.plugin.annotations.version>3.4</maven.plugin.annotations.version>
45 |     </properties>
46 |
47 |     <distributionManagement>
48 |         <snapshotRepository>
49 |             <id>sonatype-nexus-snapshots</id>
50 |             <name>Sonatype Nexus Snapshots</name>
51 |             <url>http://oss.sonatype.org/content/repositories/snapshots</url>
52 |         </snapshotRepository>
53 |         <repository>
54 |             <id>sonatype-nexus-staging</id>
55 |             <name>Nexus Release Repository</name>
56 |             <url>http://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
57 |         </repository>
58 |         <!-- https://oss.sonatype.org/content/groups/public/org/knowm/xchart -->
59 |     </distributionManagement>
60 |
61 |     <build>
62 |         <plugins>
63 |             <plugin>
64 |                 <groupId>org.apache.maven.plugins</groupId>
65 |                 <artifactId>maven-compiler-plugin</artifactId>
66 |                 <version>3.7.0</version>
67 |                 <configuration>
68 |                     <source>1.8</source>
69 |                     <target>1.8</target>
70 |                 </configuration>
71 |             </plugin>
72 |             <plugin>
73 |                 <groupId>org.apache.maven.plugins</groupId>
74 |                 <artifactId>maven-source-plugin</artifactId>
75 |                 <executions>
76 |                     <execution>
77 |                         <id>attach-sources</id>
78 |                         <goals>
79 |                             <goal>jar</goal>
80 |                         </goals>
81 |                     </execution>
82 |                 </executions>
83 |             </plugin>
84 |             <plugin>
85 |                 <groupId>org.apache.maven.plugins</groupId>
86 |                 <artifactId>maven-javadoc-plugin</artifactId>
87 |                 <executions>
88 |                     <execution>
89 |                         <id>attach-javadocs</id>
90 |                         <goals>
91 |                             <goal>jar</goal>
92 |                         </goals>
93 |                     </execution>
94 |                 </executions>
95 |             </plugin>
96 |             <plugin>
97 |                 <groupId>org.apache.maven.plugins</groupId>
98 |                 <artifactId>maven-gpg-plugin</artifactId>
99 |                 <version>1.6</version>
100 |                 <executions>
101 |                     <execution>
102 |                         <id>sign-artifacts</id>
103 |                         <phase>verify</phase>
104 |                         <goals>
105 |                             <goal>sign</goal>
106 |                         </goals>
107 |                     </execution>
108 |                 </executions>
109 |             </plugin>
110 |             <plugin>
111 |                 <groupId>org.apache.maven.plugins</groupId>
112 |                 <artifactId>maven-release-plugin</artifactId>
113 |                 <version>2.4.2</version>
114 |                 <configuration>
115 |                     <tagNameFormat>@{project.version}</tagNameFormat>
116 |                     <autoVersionSubmodules>true</autoVersionSubmodules><!-- element name assumed -->
117 |                     <releaseProfiles>releases</releaseProfiles>
118 |                 </configuration>
119 |             </plugin>
120 |         </plugins>
121 |     </build>
122 |
123 |     <modules>
124 |         <module>GoogleStorageWagon</module>
125 |         <module>CloudStorageCore</module>
126 |         <module>AzureStorageWagon</module>
127 |         <module>S3StorageWagon</module>
128 |     </modules>
129 |
130 | </project>
--------------------------------------------------------------------------------
/AzureStorageWagon/src/main/java/com/gkatzioura/maven/cloud/abs/plugin/upload/ABSUploadMojo.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.abs.plugin.upload;
2 |
3 | import java.io.File;
4 | import java.io.FileInputStream;
5 | import java.io.FileNotFoundException;
6 | import java.io.IOException;
7 | import java.io.InputStream;
8 | import java.net.URISyntaxException;
9 | import java.util.ArrayList;
10 | import java.util.List;
11 |
12 | import org.apache.maven.plugin.AbstractMojo;
13 | import org.apache.maven.plugin.MojoExecutionException;
14 | import org.apache.maven.plugin.MojoFailureException;
15 | import org.apache.maven.plugins.annotations.Mojo;
16 | import org.apache.maven.plugins.annotations.Parameter;
17 | import org.apache.maven.wagon.authentication.AuthenticationException;
18 |
19 | import com.gkatzioura.maven.cloud.abs.ConnectionStringFactory;
20 | import com.microsoft.azure.storage.CloudStorageAccount;
21 | import com.microsoft.azure.storage.StorageException;
22 | import com.microsoft.azure.storage.blob.CloudBlobContainer;
23 | import com.microsoft.azure.storage.blob.CloudBlockBlob;
24 |
25 | import static com.gkatzioura.maven.cloud.abs.ContentTypeResolver.getContentType;
26 |
27 | @Mojo(name = "abs-upload")
28 | public class ABSUploadMojo extends AbstractMojo {
29 |
30 | private CloudStorageAccount cloudStorageAccount;
31 |
32 | @Parameter(property = "abs-upload.container")
33 | private String container;
34 |
35 | @Parameter(property = "abs-upload.path")
36 | private String path;
37 |
38 | @Parameter(property = "abs-upload.key")
39 | private String key;
40 |
41 | public ABSUploadMojo() throws AuthenticationException {
42 | try {
43 | String connectionString = new ConnectionStringFactory().create();
44 | cloudStorageAccount = CloudStorageAccount.parse(connectionString);
45 | } catch (Exception e) {
46 | throw new AuthenticationException("Could not setup azure client", e);
47 | }
48 | }
49 |
50 | public ABSUploadMojo(String container, String path, String key) throws AuthenticationException {
51 | this();
52 | this.container = container;
53 | this.path = path;
54 | this.key = key;
55 | }
56 |
57 | @Override
58 | public void execute() throws MojoExecutionException, MojoFailureException {
59 | try {
60 | CloudBlobContainer blobContainer = cloudStorageAccount.createCloudBlobClient().getContainerReference(container);
61 | blobContainer.getMetadata();
62 |
63 | if(isDirectory()) {
64 | List<String> filesToUpload = findFilesToUpload(path);
65 |
66 | for(String fileToUpload: filesToUpload) {
67 | String generateKeyName = generateKeyName(fileToUpload);
68 | uploadFileToStorage(blobContainer, generateKeyName, new File(fileToUpload));
69 | }
70 | } else {
71 | uploadFileToStorage(blobContainer, keyIfNull(), new File(path));
72 | }
73 |
74 | } catch (StorageException |URISyntaxException e) {
75 | throw new MojoFailureException("Could not get container "+container,e);
76 | }
77 | }
78 |
79 | private List<String> findFilesToUpload(String filePath) {
80 | List<String> totalFiles = new ArrayList<>();
81 |
82 | File file = new File(filePath);
83 |
84 | if(file.isDirectory()) {
85 | File[] files = file.listFiles();
86 |
87 | for(File lFile: files) {
88 | if(lFile.isDirectory()) {
89 | List<String> filesFound = findFilesToUpload(lFile.getAbsolutePath());
90 | totalFiles.addAll(filesFound);
91 | } else {
92 | totalFiles.add(lFile.getAbsolutePath());
93 | }
94 | }
95 |
96 | } else {
97 | totalFiles.add(file.getAbsolutePath());
98 | }
99 |
100 | return totalFiles;
101 | }
102 |
103 | private String generateKeyName(String fullFilePath) {
104 | StringBuilder keyNameBuilder = new StringBuilder();
105 |
106 | String absolutePath = new File(path).getAbsolutePath();
107 |
108 | if(key!=null) {
109 | keyNameBuilder.append(key);
110 | if(!fullFilePath.startsWith("/")) {
111 | keyNameBuilder.append("/");
112 | }
113 | keyNameBuilder.append(fullFilePath.replace(absolutePath,""));
114 | } else {
115 | final String clearFilePath = fullFilePath.replace(absolutePath,"");
116 | final String filePathToAppend = clearFilePath.startsWith("/")? clearFilePath.replaceFirst("/",""):clearFilePath;
117 | keyNameBuilder.append(filePathToAppend);
118 | }
119 | return keyNameBuilder.toString();
120 | }
121 |
122 | private void uploadFileToStorage(CloudBlobContainer blobContainer, String key, File file) throws MojoExecutionException {
123 | try {
124 | CloudBlockBlob blob = blobContainer.getBlockBlobReference(key);
125 | blob.getProperties().setContentType(getContentType(file));
126 |
127 | try (InputStream inputStream = new FileInputStream(file)) {
128 | blob.upload(inputStream, -1);
129 | }
130 | } catch (URISyntaxException| IOException | StorageException e) {
131 | throw new MojoExecutionException("Could not upload file "+file.getName(),e);
132 | }
133 | }
134 |
135 | private boolean isDirectory() {
136 | return new File(path).isDirectory();
137 | }
138 |
139 | private String keyIfNull() {
140 | if(key==null) {
141 | return new File(path).getName();
142 | } else {
143 | return key;
144 | }
145 | }
146 |
147 | }
148 |
--------------------------------------------------------------------------------
/CloudStorageCore/src/main/java/com/gkatzioura/maven/cloud/wagon/AbstractStorageWagon.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.wagon;
18 |
19 | import java.util.logging.Logger;
20 |
21 | import org.apache.maven.wagon.ConnectionException;
22 | import org.apache.maven.wagon.Wagon;
23 | import org.apache.maven.wagon.authentication.AuthenticationException;
24 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
25 | import org.apache.maven.wagon.events.SessionListener;
26 | import org.apache.maven.wagon.events.TransferListener;
27 | import org.apache.maven.wagon.proxy.ProxyInfo;
28 | import org.apache.maven.wagon.proxy.ProxyInfoProvider;
29 | import org.apache.maven.wagon.repository.Repository;
30 |
31 | import com.gkatzioura.maven.cloud.listener.SessionListenerContainer;
32 | import com.gkatzioura.maven.cloud.listener.SessionListenerContainerImpl;
33 | import com.gkatzioura.maven.cloud.listener.TransferListenerContainer;
34 | import com.gkatzioura.maven.cloud.listener.TransferListenerContainerImpl;
35 | import com.gkatzioura.maven.cloud.resolver.BaseDirectoryResolver;
36 | import com.gkatzioura.maven.cloud.resolver.BucketResolver;
37 |
38 | public abstract class AbstractStorageWagon implements Wagon {
39 |
40 | private static final boolean SUPPORTS_DIRECTORY_COPY = true;
41 |
42 | private int connectionTimeOut = 0;
43 | private int readConnectionTimeOut = 0;
44 |
45 | protected Repository repository = null;
46 |
47 | protected final BucketResolver accountResolver;
48 | protected final BaseDirectoryResolver containerResolver;
49 |
50 | protected final SessionListenerContainer sessionListenerContainer;
51 | protected final TransferListenerContainer transferListenerContainer;
52 |
53 | private boolean interactive;
54 |
55 | private static final Logger LOGGER = Logger.getLogger(AbstractStorageWagon.class.getName());
56 |
57 | public AbstractStorageWagon() {
58 | this.accountResolver = new BucketResolver();
59 | this.containerResolver = new BaseDirectoryResolver();
60 | this.sessionListenerContainer = new SessionListenerContainerImpl(this);
61 | this.transferListenerContainer = new TransferListenerContainerImpl(this);
62 | }
63 |
64 | @Override
65 | public boolean supportsDirectoryCopy() {
66 | return SUPPORTS_DIRECTORY_COPY;
67 | }
68 |
69 | @Override
70 | public Repository getRepository() {
71 | return repository;
72 | }
73 |
74 | @Override
75 | public void openConnection() throws ConnectionException, AuthenticationException {
76 | throw new UnsupportedOperationException();
77 | }
78 |
79 | @Override
80 | public void connect(Repository repository) throws ConnectionException, AuthenticationException {
81 | connect(repository,null,(ProxyInfoProvider) null);
82 | }
83 |
84 | @Override
85 | public void connect(Repository repository, ProxyInfo proxyInfo) throws ConnectionException, AuthenticationException {
86 | connect(repository,null,proxyInfo);
87 | }
88 |
89 | @Override
90 | public void connect(Repository repository, ProxyInfoProvider proxyInfoProvider) throws ConnectionException, AuthenticationException {
91 | connect(repository, null, proxyInfoProvider);
92 | }
93 |
94 | @Override
95 | public void connect(Repository repository, AuthenticationInfo authenticationInfo) throws ConnectionException, AuthenticationException {
96 | connect(repository, authenticationInfo, (ProxyInfoProvider) null);
97 | }
98 |
99 | @Override
100 | public void connect(Repository repository, AuthenticationInfo authenticationInfo, ProxyInfo proxyInfo) throws ConnectionException, AuthenticationException {
101 | connect(repository, authenticationInfo, p->{if((p == null) || (proxyInfo == null) || p.equalsIgnoreCase(proxyInfo.getType())) return proxyInfo; else return null;});
102 | }
103 |
104 | @Override
105 | public void setTimeout(int i) {
106 | this.connectionTimeOut = i;
107 | }
108 |
109 | @Override
110 | public int getTimeout() {
111 | return connectionTimeOut;
112 | }
113 |
114 | @Override
115 | public void setReadTimeout(int i) {
116 | readConnectionTimeOut = i;
117 | }
118 |
119 | @Override
120 | public int getReadTimeout() {
121 | return readConnectionTimeOut;
122 | }
123 |
124 | @Override
125 | public void addSessionListener(SessionListener sessionListener) {
126 | sessionListenerContainer.addSessionListener(sessionListener);
127 | }
128 |
129 | @Override
130 | public void removeSessionListener(SessionListener sessionListener) {
131 | sessionListenerContainer.removeSessionListener(sessionListener);
132 | }
133 |
134 | @Override
135 | public boolean hasSessionListener(SessionListener sessionListener) {
136 | return sessionListenerContainer.hasSessionListener(sessionListener);
137 | }
138 |
139 | @Override
140 | public void addTransferListener(TransferListener transferListener) {
141 | transferListenerContainer.addTransferListener(transferListener);
142 | }
143 |
144 | @Override
145 | public void removeTransferListener(TransferListener transferListener) {
146 | transferListenerContainer.removeTransferListener(transferListener);
147 | }
148 |
149 | @Override
150 | public boolean hasTransferListener(TransferListener transferListener) {
151 | return transferListenerContainer.hasTransferListener(transferListener);
152 | }
153 |
154 | @Override
155 | public boolean isInteractive() {
156 | return interactive;
157 | }
158 |
159 | @Override
160 | public void setInteractive(boolean b) {
161 | interactive = b;
162 | }
163 |
164 | }
165 |
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/plugin/download/S3DownloadMojo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.s3.plugin.download;
18 |
19 | import java.io.File;
20 | import java.io.FileOutputStream;
21 | import java.io.IOException;
22 | import java.util.Iterator;
23 | import java.util.List;
24 | import java.util.logging.Level;
25 | import java.util.logging.Logger;
26 | import java.util.stream.Collectors;
27 |
28 | import org.apache.commons.io.IOUtils;
29 | import org.apache.maven.plugin.AbstractMojo;
30 | import org.apache.maven.plugin.MojoExecutionException;
31 | import org.apache.maven.plugin.MojoFailureException;
32 | import org.apache.maven.plugins.annotations.Mojo;
33 | import org.apache.maven.plugins.annotations.Parameter;
34 | import org.apache.maven.wagon.authentication.AuthenticationException;
35 |
36 | import com.amazonaws.services.s3.AmazonS3;
37 | import com.amazonaws.services.s3.AmazonS3ClientBuilder;
38 | import com.amazonaws.services.s3.S3ClientOptions;
39 | import com.amazonaws.services.s3.model.S3Object;
40 | import com.amazonaws.services.s3.model.S3ObjectInputStream;
41 | import com.gkatzioura.maven.cloud.KeyIteratorConcated;
42 | import com.gkatzioura.maven.cloud.s3.EndpointProperty;
43 | import com.gkatzioura.maven.cloud.s3.PathStyleEnabledProperty;
44 | import com.gkatzioura.maven.cloud.s3.plugin.PrefixKeysIterator;
45 | import com.gkatzioura.maven.cloud.s3.utils.S3Connect;
46 |
47 | @Mojo(name = "s3-download")
48 | public class S3DownloadMojo extends AbstractMojo {
49 |
50 | @Parameter( property = "s3-download.bucket")
51 | private String bucket;
52 |
53 | @Parameter(property = "s3-download.keys")
54 | private List<String> keys;
55 |
56 | @Parameter(property = "s3-download.downloadPath")
57 | private String downloadPath;
58 |
59 | @Parameter(property = "s3-download.region")
60 | private String region;
61 |
62 | private static final String DIRECTORY_CONTENT_TYPE = "application/x-directory";
63 |
64 | private static final Logger LOGGER = Logger.getLogger(S3DownloadMojo.class.getName());
65 |
66 | public S3DownloadMojo() {
67 | }
68 |
69 | public S3DownloadMojo(String bucket, List<String> keys, String downloadPath, String region) {
70 | this.bucket = bucket;
71 | this.keys = keys;
72 | this.downloadPath = downloadPath;
73 | this.region = region;
74 | }
75 |
76 | @Override
77 | public void execute() throws MojoExecutionException, MojoFailureException {
78 | AmazonS3 amazonS3;
79 |
80 | try {
81 | //Sending the authenticationInfo as null will make this use the default S3 authentication, which will only
82 | //look at the environment Java properties or environment variables
83 | amazonS3 = S3Connect.connect(null, region, EndpointProperty.empty(), new PathStyleEnabledProperty(String.valueOf(S3ClientOptions.DEFAULT_PATH_STYLE_ACCESS)));
84 | } catch (AuthenticationException e) {
85 | throw new MojoExecutionException(
86 | String.format("Unable to authenticate to S3 with the available credentials. Make sure to either define the environment variables or System properties defined in https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html.%n" +
87 | "Detail: %s", e.getMessage()),
88 | e);
89 | }
90 |
91 | if (keys.size()==1) {
92 | downloadSingleFile(amazonS3,keys.get(0));
93 | return;
94 | }
95 |
96 | List<Iterator<String>> prefixKeysIterators = keys.stream()
97 | .map(pi -> new PrefixKeysIterator(amazonS3, bucket, pi))
98 | .collect(Collectors.toList());
99 | Iterator<String> keyIteratorConcated = new KeyIteratorConcated<>(prefixKeysIterators);
100 |
101 | while (keyIteratorConcated.hasNext()) {
102 |
103 | String key = keyIteratorConcated.next();
104 | downloadFile(amazonS3,key);
105 | }
106 | }
107 |
108 | private void downloadSingleFile(AmazonS3 amazonS3,String key) {
109 | File file = new File(downloadPath);
110 |
111 | if(file.getParentFile()!=null) {
112 | file.getParentFile().mkdirs();
113 | }
114 |
115 | S3Object s3Object = amazonS3.getObject(bucket, key);
116 |
117 | try(S3ObjectInputStream s3ObjectInputStream = s3Object.getObjectContent();
118 | FileOutputStream fileOutputStream = new FileOutputStream(file)
119 | ) {
120 | IOUtils.copy(s3ObjectInputStream,fileOutputStream);
121 | } catch (IOException e) {
122 | LOGGER.log(Level.SEVERE, "Could not download s3 file");
123 | e.printStackTrace();
124 | }
125 | }
126 |
127 | private void downloadFile(AmazonS3 amazonS3,String key) {
128 |
129 | File file = new File(createFullFilePath(key));
130 |
131 | if(file.getParent()!=null) {
132 | file.getParentFile().mkdirs();
133 | }
134 |
135 | S3Object s3Object = amazonS3.getObject(bucket, key);
136 |
137 | if(isDirectory(s3Object)) {
138 | return;
139 | }
140 |
141 | try(S3ObjectInputStream s3ObjectInputStream = s3Object.getObjectContent();
142 | FileOutputStream fileOutputStream = new FileOutputStream(file)
143 | ) {
144 | IOUtils.copy(s3ObjectInputStream,fileOutputStream);
145 | } catch (IOException e) {
146 | LOGGER.log(Level.SEVERE, "Could not download s3 file");
147 | e.printStackTrace();
148 | }
149 | }
150 |
151 | private final String createFullFilePath(String key) {
152 |
153 | String fullPath = downloadPath+"/"+key;
154 | return fullPath;
155 | }
156 |
157 | private final boolean isDirectory(S3Object s3Object) {
158 | return s3Object.getObjectMetadata().getContentType().equals(DIRECTORY_CONTENT_TYPE);
159 | }
160 |
161 |
162 | }
163 |
--------------------------------------------------------------------------------
/GoogleStorageWagon/src/main/java/com/gkatzioura/maven/cloud/gcs/wagon/GoogleStorageRepository.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.gcs.wagon;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.io.InputStream;
22 | import java.nio.ByteBuffer;
23 | import java.util.ArrayList;
24 | import java.util.Collections;
25 | import java.util.List;
26 | import java.util.Optional;
27 | import java.util.logging.Level;
28 | import java.util.logging.Logger;
29 |
30 | import org.apache.maven.wagon.ResourceDoesNotExistException;
31 | import org.apache.maven.wagon.authentication.AuthenticationException;
32 |
33 | import com.gkatzioura.maven.cloud.gcs.StorageFactory;
34 | import com.gkatzioura.maven.cloud.resolver.KeyResolver;
35 | import com.gkatzioura.maven.cloud.wagon.PublicReadProperty;
36 | import com.google.api.gax.paging.Page;
37 | import com.google.cloud.WriteChannel;
38 | import com.google.cloud.storage.Acl;
39 | import com.google.cloud.storage.Blob;
40 | import com.google.cloud.storage.BlobInfo;
41 | import com.google.cloud.storage.Storage;
42 |
43 | public class GoogleStorageRepository {
44 |
45 | private final String bucket;
46 | private final String baseDirectory;
47 | private final KeyResolver keyResolver = new KeyResolver();
48 | private final StorageFactory storageFactory = new StorageFactory();
49 | private final Optional<String> keyPath;
50 | private final PublicReadProperty publicReadProperty;
51 |
52 | private Storage storage;
53 |
54 | private static final Logger LOGGER = Logger.getLogger(GoogleStorageRepository.class.getName());
55 |
56 | public GoogleStorageRepository(Optional<String> keyPath, String bucket, String directory, PublicReadProperty publicReadProperty) {
57 | this.keyPath = keyPath;
58 | this.bucket = bucket;
59 | this.baseDirectory = directory;
60 | this.publicReadProperty = publicReadProperty;
61 | }
62 |
63 | public void connect() throws AuthenticationException {
64 | try {
65 | storage = createStorage();
66 | storage.list(bucket, Storage.BlobListOption.pageSize(1));
67 | } catch (Exception e) {
68 | LOGGER.log(Level.SEVERE,"Could not establish connection with google cloud",e);
69 | throw new AuthenticationException("Please configure you google cloud account by logging using gcloud and specify a default project");
70 | }
71 | }
72 |
73 | private final Storage createStorage() throws IOException {
74 | if(keyPath.isPresent()) {
75 | return storageFactory.createWithKeyFile(keyPath.get());
76 | } else {
77 | return storageFactory.createDefault();
78 | }
79 | }
80 |
81 | public void copy(String resourceName, File destination) throws ResourceDoesNotExistException {
82 |
83 | final String key = resolveKey(resourceName);
84 |
85 | LOGGER.log(Level.FINER,String.format("Downloading key %s from bucket %s into %s",key,bucket ,destination.getAbsolutePath()));
86 |
87 | Blob blob = storage.get(bucket, resolveKey(resourceName));
88 |
89 | if(blob==null) {
90 | LOGGER.log(Level.FINER,String.format("Blob %s does not exist",key));
91 | throw new ResourceDoesNotExistException(key);
92 | }
93 | blob.downloadTo(destination.toPath());
94 | }
95 |
96 | public boolean newResourceAvailable(String resourceName,long timeStamp) {
97 |
98 | final String key = resolveKey(resourceName);
99 |
100 | LOGGER.log(Level.FINER,String.format("Checking if new key %s exists",key));
101 |
102 | Blob blob = storage.get(bucket, key);
103 |
104 | if(blob==null) {
105 | return false;
106 | }
107 |
108 | long updated = blob.getUpdateTime();
109 | return updated>timeStamp;
110 | }
111 |
112 | public void put(InputStream inputStream,String destination) throws IOException {
113 | String key = resolveKey(destination);
114 |
115 | LOGGER.log(Level.FINER,String.format("Uploading key %s ",key));
116 |
117 | BlobInfo blobInfo = applyPublicRead(BlobInfo.newBuilder(bucket,key)).build();
118 |
119 | try(WriteChannel writeChannel = storage.writer(blobInfo)) {
120 |
121 | byte[] buffer = new byte[1024];
122 | int read;
123 |
124 | while ((read = inputStream.read(buffer, 0, buffer.length)) != -1) {
125 | writeChannel.write(ByteBuffer.wrap(buffer,0, read));
126 | }
127 | }
128 | }
129 |
130 | private BlobInfo.Builder applyPublicRead(BlobInfo.Builder builder) {
131 | if(publicReadProperty.get()) {
132 | Acl acl = Acl.newBuilder(Acl.User.ofAllUsers(), Acl.Role.READER).build();
133 | LOGGER.info("Public read was set to true");
134 | return builder.setAcl(Collections.singletonList(acl));
135 |
136 | } else {
137 | return builder;
138 | }
139 | }
140 |
141 | public List<String> list(String path) {
142 |
143 | String key = resolveKey(path);
144 |
145 | LOGGER.log(Level.FINER,String.format("Listing files for %s",path));
146 |
147 | Page<Blob> page = storage.list(bucket, Storage.BlobListOption.prefix(key));
148 | return totalBlobs(page);
149 | }
150 |
151 | private List<String> totalBlobs(Page<Blob> page) {
152 |
153 | List<String> blobs = new ArrayList<>();
154 | page.getValues().forEach(bv->blobs.add(bv.getName()));
155 | if(page.hasNextPage()) {
156 | Page<Blob> newPage = storage.list(bucket,Storage.BlobListOption.pageToken(page.getNextPageToken()));
157 | blobs.addAll(totalBlobs(newPage));
158 | }
159 |
160 | return blobs;
161 | }
162 |
163 | public boolean exists(String resourceName) {
164 | final String key = resolveKey(resourceName);
165 | Blob blob = storage.get(bucket, key);
166 | return blob != null && blob.exists();
167 | }
168 |
169 | public void disconnect() {
170 | storage = null;
171 | }
172 |
173 | private String resolveKey(String path) {
174 | return keyResolver.resolve(baseDirectory,path);
175 | }
176 |
177 | }
178 |
--------------------------------------------------------------------------------
/AzureStorageWagon/src/main/java/com/gkatzioura/maven/cloud/abs/plugin/download/ABSDownloadMojo.java:
--------------------------------------------------------------------------------
1 | package com.gkatzioura.maven.cloud.abs.plugin.download;
2 |
3 | import java.io.File;
4 | import java.io.FileOutputStream;
5 | import java.io.IOException;
6 | import java.io.InputStream;
7 | import java.net.URISyntaxException;
8 | import java.util.Iterator;
9 | import java.util.List;
10 | import java.util.logging.Level;
11 | import java.util.logging.Logger;
12 | import java.util.stream.Collectors;
13 |
14 | import org.apache.commons.io.IOUtils;
15 | import org.apache.maven.plugin.AbstractMojo;
16 | import org.apache.maven.plugin.MojoExecutionException;
17 | import org.apache.maven.plugin.MojoFailureException;
18 | import org.apache.maven.plugins.annotations.Mojo;
19 | import org.apache.maven.plugins.annotations.Parameter;
20 | import org.apache.maven.wagon.authentication.AuthenticationException;
21 |
22 | import com.gkatzioura.maven.cloud.KeyIteratorConcated;
23 | import com.gkatzioura.maven.cloud.abs.ConnectionStringFactory;
24 | import com.gkatzioura.maven.cloud.abs.plugin.PrefixKeysIterator;
25 | import com.microsoft.azure.storage.CloudStorageAccount;
26 | import com.microsoft.azure.storage.StorageException;
27 | import com.microsoft.azure.storage.blob.BlobInputStream;
28 | import com.microsoft.azure.storage.blob.CloudBlob;
29 | import com.microsoft.azure.storage.blob.CloudBlobContainer;
30 | import com.microsoft.azure.storage.blob.ListBlobItem;
31 |
32 | @Mojo(name = "abs-download")
33 | public class ABSDownloadMojo extends AbstractMojo {
34 |
35 | private CloudStorageAccount cloudStorageAccount;
36 |
37 | @Parameter(property = "abs-download.container")
38 | private String container;
39 |
40 | @Parameter(property = "abs-download.keys")
41 | private List<String> keys;
42 |
43 | @Parameter(property = "abs-download.downloadPath")
44 | private String downloadPath;
45 |
46 | private static final Logger LOGGER = Logger.getLogger(ABSDownloadMojo.class.getName());
47 |
48 | public ABSDownloadMojo(String container, List<String> keys, String downloadPath) throws AuthenticationException {
49 | this();
50 | this.container = container;
51 | this.keys = keys;
52 | this.downloadPath = downloadPath;
53 | }
54 |
55 | public ABSDownloadMojo() throws AuthenticationException {
56 | try {
57 | String connectionString = new ConnectionStringFactory().create();
58 | cloudStorageAccount = CloudStorageAccount.parse(connectionString);
59 | } catch (Exception e) {
60 | throw new AuthenticationException("Could not setup azure client",e);
61 | }
62 | }
63 |
64 | @Override
65 | public void execute() throws MojoExecutionException, MojoFailureException {
66 | try {
67 | CloudBlobContainer blobContainer = cloudStorageAccount.createCloudBlobClient().getContainerReference(container);
68 | blobContainer.getMetadata();
69 |
70 | if (keys.size()==1) {
71 | downloadSingleFile(blobContainer,keys.get(0));
72 | return;
73 | }
74 |
75 | List<Iterator<ListBlobItem>> prefixKeysIterators = keys.stream()
76 | .map(pi -> new PrefixKeysIterator(blobContainer, pi))
77 | .collect(Collectors.toList());
78 | Iterator<ListBlobItem> keyIteratorConcatenated = new KeyIteratorConcated<>(prefixKeysIterators);
79 |
80 | while (keyIteratorConcatenated.hasNext()) {
81 | ListBlobItem key = keyIteratorConcatenated.next();
82 | downloadFile(blobContainer,key);
83 | }
84 |
85 | } catch (StorageException |URISyntaxException e) {
86 | throw new MojoFailureException("Could not get container "+container,e);
87 | }
88 | }
89 |
90 | private void downloadSingleFile(CloudBlobContainer cloudBlobContainer,String key) throws MojoExecutionException {
91 | File file = new File(downloadPath);
92 |
93 | if(file.getParentFile()!=null) {
94 | file.getParentFile().mkdirs();
95 | }
96 |
97 | try {
98 | CloudBlob cloudBlob = cloudBlobContainer.getBlobReferenceFromServer(key);
99 |
100 | if(!cloudBlob.exists()) {
101 | LOGGER.log(Level.FINER,"Blob {} does not exist", key);
102 | throw new MojoExecutionException("Could not find blob "+key);
103 | }
104 |
105 | try(BlobInputStream blobInputStream = cloudBlob.openInputStream();
106 | FileOutputStream fileOutputStream = new FileOutputStream(file)
107 | ) {
108 | IOUtils.copy(blobInputStream, fileOutputStream);
109 | } catch (IOException e) {
110 | LOGGER.log(Level.SEVERE, "Could not download abs file");
111 | throw new MojoExecutionException("Could not download abs file "+key);
112 | }
113 | } catch (URISyntaxException| StorageException e) {
114 | throw new MojoExecutionException("Could not fetch abs file "+key,e);
115 | }
116 | }
117 |
118 | private void downloadFile(CloudBlobContainer cloudBlobContainer,ListBlobItem listBlobItem) throws MojoExecutionException {
119 | String key = listBlobItem.getUri().getPath().replace("/"+container+"/","");
120 | File file = new File(createFullFilePath(key));
121 |
122 | if(file.getParent()!=null) {
123 | file.getParentFile().mkdirs();
124 | }
125 |
126 | if(isDirectory(cloudBlobContainer, key)) {
127 | return;
128 | }
129 |
130 | final CloudBlob cloudBlob;
131 |
132 | try {
133 | cloudBlob = cloudBlobContainer.getBlobReferenceFromServer(key);
134 | } catch (URISyntaxException |StorageException e) {
135 | throw new MojoExecutionException("Could not fetch abs file "+key,e);
136 | }
137 |
138 | try(InputStream objectInputStream = cloudBlob.openInputStream();
139 | FileOutputStream fileOutputStream = new FileOutputStream(file)
140 | ) {
141 | IOUtils.copy(objectInputStream,fileOutputStream);
142 | } catch (IOException |StorageException e) {
143 | LOGGER.log(Level.SEVERE, "Could not download abs file");
144 | throw new MojoExecutionException("Could not download abs file "+key,e);
145 | }
146 | }
147 |
148 | private final String createFullFilePath(String key) {
149 | String fullPath = downloadPath+"/"+key;
150 | return fullPath;
151 | }
152 |
153 | private final boolean isDirectory(CloudBlobContainer container, String key) {
154 | try {
155 | return container.getDirectoryReference(key).listBlobs().iterator().hasNext();
156 | } catch (StorageException |URISyntaxException e) {
157 | LOGGER.log(Level.SEVERE, "Abs key is not a directory");
158 | return false;
159 | }
160 | }
161 |
162 | }
163 |
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/plugin/upload/S3UploadMojo.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.s3.plugin.upload;
18 |
19 | import java.io.File;
20 | import java.io.FileInputStream;
21 | import java.io.IOException;
22 | import java.io.InputStream;
23 | import java.util.ArrayList;
24 | import java.util.List;
25 |
26 | import org.apache.maven.plugin.AbstractMojo;
27 | import org.apache.maven.plugin.MojoExecutionException;
28 | import org.apache.maven.plugin.MojoFailureException;
29 | import org.apache.maven.plugins.annotations.Mojo;
30 | import org.apache.maven.plugins.annotations.Parameter;
31 | import org.apache.maven.wagon.authentication.AuthenticationException;
32 |
33 | import com.amazonaws.services.s3.AmazonS3;
34 | import com.amazonaws.services.s3.model.ObjectMetadata;
35 | import com.amazonaws.services.s3.S3ClientOptions;
36 | import com.amazonaws.services.s3.model.PutObjectRequest;
37 |
38 | import com.gkatzioura.maven.cloud.s3.EndpointProperty;
39 | import com.gkatzioura.maven.cloud.s3.PathStyleEnabledProperty;
40 | import com.gkatzioura.maven.cloud.s3.utils.S3Connect;
41 |
42 | @Mojo(name = "s3-upload")
43 | public class S3UploadMojo extends AbstractMojo {
44 |
45 | @Parameter( property = "s3-upload.bucket")
46 | private String bucket;
47 |
48 | @Parameter(property = "s3-upload.path")
49 | private String path;
50 |
51 | @Parameter(property = "s3-upload.key")
52 | private String key;
53 |
54 | @Parameter(property = "s3-upload.region")
55 | private String region;
56 |
57 | public S3UploadMojo() {
58 | }
59 |
60 | /**
61 | * If the path points to a file, that single file is uploaded. If it points to a directory,
62 | * the directory is walked recursively and every file found is uploaded, using the key as a prefix.
63 | * @param bucket
64 | * @param path
65 | * @param key
66 | * @param region
67 | */
68 | public S3UploadMojo(String bucket, String path, String key, String region) {
69 | this.bucket = bucket;
70 | this.path = path;
71 | this.key = key;
72 | this.region = region;
73 | }
74 |
75 | /**
76 | * At least the bucket must be specified, otherwise the execution fails; the remaining parameters fall back to defaults.
77 | * @throws MojoExecutionException
78 | * @throws MojoFailureException
79 | */
80 | @Override
81 | public void execute() throws MojoExecutionException, MojoFailureException {
82 | if (bucket == null) {
83 | throw new MojoExecutionException("You need to specify a bucket for the s3-upload goal configuration");
84 | }
85 |
86 | AmazonS3 amazonS3;
87 | try {
88 | //Sending the authenticationInfo as null will make this use the default S3 authentication, which will only
89 | //look at the environment Java properties or environment variables
90 | amazonS3 = S3Connect.connect(null, region, EndpointProperty.empty(), new PathStyleEnabledProperty(String.valueOf(S3ClientOptions.DEFAULT_PATH_STYLE_ACCESS)));
91 | } catch (AuthenticationException e) {
92 | throw new MojoExecutionException(
93 | String.format("Unable to authenticate to S3 with the available credentials. Make sure to either define the environment variables or System properties defined in https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html.%n" +
94 | "Detail: %s", e.getMessage()),
95 | e);
96 | }
97 |
98 | if(isDirectory()){
99 | List<String> filesToUpload = findFilesToUpload(path);
100 |
101 | for(String fileToUpload: filesToUpload) {
102 | keyUpload(amazonS3, generateKeyName(fileToUpload), new File(fileToUpload));
103 | }
104 | } else {
105 | keyUpload(amazonS3, keyIfNull(), new File(path));
106 | }
107 | }
108 |
109 | private void keyUpload(AmazonS3 amazonS3, String keyName, File file) throws MojoExecutionException {
110 | try (InputStream inputStream = new FileInputStream(file)) {
111 | ObjectMetadata objectMetadata = new ObjectMetadata();
112 | objectMetadata.setContentLength(file.length());
113 |
114 | PutObjectRequest putObjectRequest = new PutObjectRequest(bucket, keyName, inputStream, objectMetadata);
115 | amazonS3.putObject(putObjectRequest);
116 | } catch (IOException e) {
117 | throw new MojoExecutionException("Failed to upload mojo",e);
118 | }
119 | }
120 |
121 | private List<String> findFilesToUpload(String filePath) {
122 | List<String> totalFiles = new ArrayList<>();
123 |
124 | File file = new File(filePath);
125 |
126 | if(file.isDirectory()) {
127 | File[] files = file.listFiles();
128 |
129 | for(File lFile: files) {
130 | if(lFile.isDirectory()) {
131 | List<String> filesFound = findFilesToUpload(lFile.getAbsolutePath());
132 | totalFiles.addAll(filesFound);
133 | } else {
134 | totalFiles.add(lFile.getAbsolutePath());
135 | }
136 | }
137 |
138 | } else {
139 | totalFiles.add(file.getAbsolutePath());
140 | }
141 |
142 | return totalFiles;
143 | }
144 |
145 | private boolean isDirectory() {
146 | return new File(path).isDirectory();
147 | }
148 |
149 | private String generateKeyName(String fullFilePath) {
150 | StringBuilder keyNameBuilder = new StringBuilder();
151 |
152 | String absolutePath = new File(path).getAbsolutePath();
153 |
154 | if(key!=null) {
155 | keyNameBuilder.append(key);
156 | if(!fullFilePath.startsWith("/")) {
157 | keyNameBuilder.append("/");
158 | }
159 | keyNameBuilder.append(fullFilePath.replace(absolutePath,""));
160 | } else {
161 | final String clearFilePath = fullFilePath.replace(absolutePath,"");
162 | final String filePathToAppend = clearFilePath.startsWith("/")? clearFilePath.replaceFirst("/",""):clearFilePath;
163 | keyNameBuilder.append(filePathToAppend);
164 | }
165 | return keyNameBuilder.toString();
166 | }
167 |
168 | private String keyIfNull() {
169 | if(key==null) {
170 | return new File(path).getName();
171 | } else {
172 | return key;
173 | }
174 | }
175 |
176 | }
177 |
--------------------------------------------------------------------------------
/S3StorageWagon/README.md:
--------------------------------------------------------------------------------
1 | # S3StorageWagon
2 |
3 | ## Upload/download maven artifacts using s3
4 |
5 | The S3StorageWagon project enables you to upload and download your artifacts to and from an AWS S3 bucket.
6 |
7 | ```xml
8 | <build>
9 |     <extensions>
10 |         <extension>
11 |             <groupId>com.gkatzioura.maven.cloud</groupId>
12 |             <artifactId>s3-storage-wagon</artifactId>
13 |             <version>1.8</version>
14 |         </extension>
15 |     </extensions>
16 | </build>
17 | ```
18 | Full guide on [wagon](https://egkatzioura.com/2018/04/09/host-your-maven-artifacts-using-amazon-s3/)
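
A minimal sketch of the matching `distributionManagement` section (bucket name and base directories here are placeholders, and the `id` must match the `<server>` entry in settings.xml that holds the credentials):

```xml
<distributionManagement>
    <snapshotRepository>
        <id>bucket-repo</id>
        <url>s3://your-maven-bucket/snapshot</url>
    </snapshotRepository>
    <repository>
        <id>bucket-repo</id>
        <url>s3://your-maven-bucket/release</url>
    </repository>
</distributionManagement>
```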
19 |
20 | ### Public repos
21 |
22 | You can mark your artifacts as public so that they can be downloaded without the need for authorised access to your bucket.
23 |
24 | To specify a repo as public you can do so through the settings.xml:
25 |
26 | ```xml
27 | <server>
28 |     <id>bucket-repo</id>
29 |     <username>access_key</username>
30 |     <password>access_secret</password>
31 |     <configuration>
32 |         <region>eu-west-1</region>
33 |         <publicRepository>true</publicRepository>
34 |     </configuration>
35 | </server>
36 | ```
37 |
38 | You can also use system properties with the mvn command
39 |
40 | ```bash
41 | mvn deploy -DpublicRepository=true
42 | ```
43 |
44 | Or through environment variables
45 |
46 | ```bash
47 | PUBLIC_REPOSITORY=true mvn deploy
48 | ```
49 |
50 | Then you can consume the artifacts without any authorised access:
51 |
52 | ```xml
53 | <repositories>
54 |     <repository>
55 |         <id>bucket-repo</id>
56 |         <url>https://s3-eu-west-1.amazonaws.com/whatever/snapshot</url>
57 |     </repository>
58 | </repositories>
59 | ```
60 |
61 | ## Upload/download files for ci/cd purposes
62 |
63 | Apart from providing a way to use S3 as a Maven repository, the s3-storage-wagon can also be used as a plugin in order to
64 | upload and download arbitrary files to and from S3.
65 |
66 | ### Configuration
67 | Note that the configuration set for servers and repositories does not apply to this mode of operation.
68 |
69 | #### Authentication
70 | Authentication must be provided by the environment. See the
71 | [AWS SDK documentation on the default credentials provider chain](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html)
72 | for a description of all locations where such configuration can be set.
73 |
74 | A simple way to configure this is to define the username and password as Properties available to the maven environment:
75 | ```xml
76 | <properties>
77 |     <aws.accessKeyId>access_key</aws.accessKeyId> <!-- AWS SDK system property names; element names assumed -->
78 |     <aws.secretKey>access_secret</aws.secretKey>
79 | </properties>
80 | ```
81 |
82 | Alternatively, you may pick any of the other methods mentioned in the link above (e.g., defining the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables).
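
For example, a minimal sketch of the environment variable approach (placeholder values):

```bash
export AWS_ACCESS_KEY_ID=access_key
export AWS_SECRET_ACCESS_KEY=access_secret
# executions bound to the package phase pick the credentials up from the environment
mvn package
```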
83 |
84 |
85 | ### Upload files
86 |
87 | ```xml
88 | <build>
89 |     <plugins>
90 |         <plugin>
91 |             <groupId>com.gkatzioura.maven.cloud</groupId>
92 |             <artifactId>s3-storage-wagon</artifactId>
93 |             <version>1.5-SNAPSHOT</version>
94 |             <executions>
95 |                 <execution>
96 |                     <id>upload-single-file</id>
97 |                     <phase>package</phase>
98 |                     <goals>
99 |                         <goal>s3-upload</goal>
100 |                     </goals>
101 |                     <configuration>
102 |                         <bucket>yourbucketname</bucket>
103 |                         <region>yourbucket-region</region>
104 |                         <path>/file/path/test.txt</path>
105 |                         <key>test.txt</key>
106 |                     </configuration>
107 |                 </execution>
108 |                 <execution>
109 |                     <id>upload-multiple-files</id>
110 |                     <phase>package</phase>
111 |                     <goals>
112 |                         <goal>s3-upload</goal>
113 |                     </goals>
114 |                     <configuration>
115 |                         <bucket>yourbucketname</bucket>
116 |                         <region>yourbucket-region</region>
117 |                         <path>/path/to/directory/with/files</path>
118 |                         <key>prefixforfiles</key>
119 |                     </configuration>
120 |                 </execution>
121 |                 <execution>
122 |                     <id>upload-single-file-no-key</id>
123 |                     <phase>package</phase>
124 |                     <goals>
125 |                         <goal>s3-upload</goal>
126 |                     </goals>
127 |                     <configuration>
128 |                         <bucket>yourbucketname</bucket>
129 |                         <region>yourbucket-region</region>
130 |                         <path>/file/path/test.txt</path>
131 |                     </configuration>
132 |                 </execution>
133 |             </executions>
134 |         </plugin>
135 |     </plugins>
136 | </build>
137 | ```
138 |
139 | ### Download files
140 |
141 | ```xml
142 | <build>
143 |     <plugins>
144 |         <plugin>
145 |             <groupId>com.gkatzioura.maven.cloud</groupId>
146 |             <artifactId>s3-storage-wagon</artifactId>
147 |             <version>1.5-SNAPSHOT</version>
148 |             <executions>
149 |                 <execution>
150 |                     <id>download-multiple-files-to-one-directory</id>
151 |                     <phase>package</phase>
152 |                     <goals>
153 |                         <goal>s3-download</goal>
154 |                     </goals>
155 |                     <configuration>
156 |                         <bucket>yourbucketname</bucket>
157 |                         <downloadPath>/path/to/directory</downloadPath>
158 |                         <keys>file1.txt,file2.jpg</keys>
159 |                     </configuration>
160 |                 </execution>
161 |                 <execution>
162 |                     <id>download-files-and-files-starting-with-prefix</id>
163 |                     <phase>package</phase>
164 |                     <goals>
165 |                         <goal>s3-download</goal>
166 |                     </goals>
167 |                     <configuration>
168 |                         <bucket>yourbucketname</bucket>
169 |                         <downloadPath>/path/to/directory</downloadPath>
170 |                         <keys>prefix,file1.txt,file2.txt</keys>
171 |                     </configuration>
172 |                 </execution>
173 |                 <execution>
174 |                     <id>download-single-file</id>
175 |                     <phase>package</phase>
176 |                     <goals>
177 |                         <goal>s3-download</goal>
178 |                     </goals>
179 |                     <configuration>
180 |                         <bucket>yourbucketname</bucket>
181 |                         <downloadPath>/path/to/directory/file.txt</downloadPath>
182 |                         <keys>file-to-download.txt</keys>
183 |                     </configuration>
184 |                 </execution>
185 |             </executions>
186 |         </plugin>
187 |     </plugins>
188 | </build>
189 | ```
190 |
191 | Full guide on [upload and download](https://egkatzioura.com/2019/01/22/upload-and-download-files-to-s3-using-maven/).
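
Since every configuration element is also bound to a Maven property (`s3-download.bucket`, `s3-download.keys`, `s3-download.downloadPath` and so on), the goals can also be invoked straight from the command line; a sketch, assuming version 2.3 of the plugin:

```bash
mvn com.gkatzioura.maven.cloud:s3-storage-wagon:2.3:s3-download \
    -Ds3-download.bucket=yourbucketname \
    -Ds3-download.keys=file1.txt \
    -Ds3-download.downloadPath=/path/to/directory/file1.txt
```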
192 |
193 |
194 |
--------------------------------------------------------------------------------
/AzureStorageWagon/src/main/java/com/gkatzioura/maven/cloud/abs/AzureStorageRepository.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.abs;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.io.InputStream;
22 | import java.io.OutputStream;
23 | import java.net.URISyntaxException;
24 | import java.security.InvalidKeyException;
25 | import java.util.ArrayList;
26 | import java.util.Iterator;
27 | import java.util.List;
28 | import java.util.logging.Level;
29 | import java.util.logging.Logger;
30 |
31 | import org.apache.commons.io.IOUtils;
32 | import org.apache.maven.wagon.ResourceDoesNotExistException;
33 | import org.apache.maven.wagon.TransferFailedException;
34 | import org.apache.maven.wagon.authentication.AuthenticationException;
35 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
36 |
37 | import com.gkatzioura.maven.cloud.transfer.TransferProgress;
38 | import com.gkatzioura.maven.cloud.transfer.TransferProgressFileInputStream;
39 | import com.gkatzioura.maven.cloud.transfer.TransferProgressFileOutputStream;
40 | import com.microsoft.azure.storage.CloudStorageAccount;
41 | import com.microsoft.azure.storage.StorageException;
42 | import com.microsoft.azure.storage.blob.CloudBlob;
43 | import com.microsoft.azure.storage.blob.CloudBlobContainer;
44 | import com.microsoft.azure.storage.blob.CloudBlockBlob;
45 | import com.microsoft.azure.storage.blob.ListBlobItem;
46 |
47 | import static com.gkatzioura.maven.cloud.abs.ContentTypeResolver.getContentType;
48 |
49 | public class AzureStorageRepository {
50 |
51 | private final String container;
52 | private final ConnectionStringFactory connectionStringFactory;
53 | private CloudBlobContainer blobContainer;
54 |
55 | private static final Logger LOGGER = Logger.getLogger(AzureStorageRepository.class.getName());
56 |
57 | public AzureStorageRepository(String directory) {
58 | this.connectionStringFactory = new ConnectionStringFactory();
59 | this.container = directory;
60 | }
61 |
62 | public void connect(AuthenticationInfo authenticationInfo) throws AuthenticationException {
63 |
64 | String connectionString = connectionStringFactory.create(authenticationInfo);
65 | try {
66 | CloudStorageAccount cloudStorageAccount = CloudStorageAccount.parse(connectionString);
67 | blobContainer = cloudStorageAccount.createCloudBlobClient().getContainerReference(container);
68 | blobContainer.getMetadata();
69 | } catch (URISyntaxException |InvalidKeyException |StorageException e) {
70 | throw new AuthenticationException("Provide valid credentials");
71 | }
72 | }
73 |
74 | public void copy(String resourceName, File destination, TransferProgress transferProgress) throws ResourceDoesNotExistException {
75 |
76 | LOGGER.log(Level.FINER,String.format("Downloading key %s from container %s into %s", resourceName, container, destination.getAbsolutePath()));
77 |
78 | try {
79 |
80 | CloudBlob cloudBlob = blobContainer.getBlobReferenceFromServer(resourceName);
81 |
82 | if(!cloudBlob.exists()) {
83 | LOGGER.log(Level.FINER,"Blob {} does not exist",resourceName);
84 | throw new ResourceDoesNotExistException(resourceName);
85 | }
86 |
87 | try(OutputStream outputStream = new TransferProgressFileOutputStream(destination, transferProgress);
88 | InputStream inputStream = cloudBlob.openInputStream()) {
89 | IOUtils.copy(inputStream,outputStream);
90 | }
91 | } catch (URISyntaxException |StorageException |IOException e) {
92 | throw new ResourceDoesNotExistException("Could not download file from repo",e);
93 | }
94 | }
95 |
96 | public boolean newResourceAvailable(String resourceName,long timeStamp) throws ResourceDoesNotExistException{
97 |
98 | LOGGER.log(Level.FINER,String.format("Checking if new key %s exists",resourceName));
99 |
100 | try {
101 | CloudBlob cloudBlob = blobContainer.getBlobReferenceFromServer(resourceName);
102 | if(!cloudBlob.exists()) {
103 | return false;
104 | }
105 |
106 | long updated = cloudBlob.getProperties().getLastModified().getTime();
107 | return updated>timeStamp;
108 | } catch (URISyntaxException |StorageException e) {
109 | LOGGER.log(Level.SEVERE,"Could not fetch cloud blob",e);
110 | throw new ResourceDoesNotExistException(resourceName);
111 | }
112 | }
113 |
114 | public void put(File file, String destination,TransferProgress transferProgress) throws TransferFailedException {
115 |
116 | LOGGER.log(Level.FINER,String.format("Uploading key %s ",destination));
117 | try {
118 |
119 | CloudBlockBlob blob = blobContainer.getBlockBlobReference(destination);
120 | blob.getProperties().setContentType(getContentType(file));
121 |
122 | try(InputStream inputStream = new TransferProgressFileInputStream(file,transferProgress)) {
123 | blob.upload(inputStream,-1);
124 | }
125 | } catch (URISyntaxException |StorageException | IOException e) {
126 | LOGGER.log(Level.SEVERE,"Could not upload cloud blob",e);
127 | throw new TransferFailedException(destination);
128 | }
129 | }
130 |
131 |
132 | public boolean exists(String resourceName) throws TransferFailedException {
133 |
134 | try {
135 | CloudBlockBlob blob = blobContainer.getBlockBlobReference(resourceName);
136 | return blob.exists();
137 | } catch (URISyntaxException |StorageException e) {
138 | LOGGER.log(Level.SEVERE,"Could not fetch cloud blob",e);
139 | throw new TransferFailedException(resourceName);
140 | }
141 | }
142 |
143 | public List<String> list(String path) {
144 |
145 | LOGGER.info(String.format("Listing files for %s",path));
146 |
147 | List<String> blobs = new ArrayList<>();
148 |
149 | Iterable<ListBlobItem> blobItems = blobContainer.listBlobs();
150 | Iterator<ListBlobItem> iterator = blobItems.iterator();
151 |
152 | while (iterator.hasNext()) {
153 |
154 | ListBlobItem blobItem = iterator.next();
155 |
156 | if(blobItem instanceof CloudBlob) {
157 |
158 | CloudBlob cloudBlob = (CloudBlob) blobItem;
159 | blobs.add(cloudBlob.getName());
160 | }
161 | }
162 |
163 | return blobs;
164 | }
165 |
166 | public void disconnect() {
167 | blobContainer = null;
168 | }
169 |
170 | }
171 |
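172 | /*
173 |  * Minimal usage sketch (illustrative only; it assumes the storage account
174 |  * credentials are carried on the AuthenticationInfo that ConnectionStringFactory
175 |  * turns into a connection string, and that "maven-releases" is an existing container):
176 |  *
177 |  *   AzureStorageRepository repository = new AzureStorageRepository("maven-releases");
178 |  *   repository.connect(authenticationInfo);
179 |  *   repository.put(artifactFile, "com/example/app/1.0/app-1.0.jar", transferProgress);
180 |  *   List<String> keys = repository.list("");
181 |  *   repository.disconnect();
182 |  */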
--------------------------------------------------------------------------------
/AzureStorageWagon/src/main/java/com/gkatzioura/maven/cloud/abs/AzureStorageWagon.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.abs;
18 |
19 | import java.io.File;
20 | import java.nio.file.Paths;
21 | import java.util.List;
22 | import java.util.logging.Level;
23 | import java.util.logging.Logger;
24 |
25 | import org.apache.commons.io.FilenameUtils;
26 | import org.apache.maven.wagon.ConnectionException;
27 | import org.apache.maven.wagon.ResourceDoesNotExistException;
28 | import org.apache.maven.wagon.TransferFailedException;
29 | import org.apache.maven.wagon.authentication.AuthenticationException;
30 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
31 | import org.apache.maven.wagon.authorization.AuthorizationException;
32 | import org.apache.maven.wagon.events.TransferEvent;
33 | import org.apache.maven.wagon.proxy.ProxyInfoProvider;
34 | import org.apache.maven.wagon.repository.Repository;
35 | import org.apache.maven.wagon.resource.Resource;
36 |
37 | import com.gkatzioura.maven.cloud.transfer.TransferProgress;
38 | import com.gkatzioura.maven.cloud.transfer.TransferProgressImpl;
39 | import com.gkatzioura.maven.cloud.wagon.AbstractStorageWagon;
40 |
41 | public class AzureStorageWagon extends AbstractStorageWagon {
42 |
43 | private AzureStorageRepository azureStorageRepository;
44 |
45 | private static final Logger LOGGER = Logger.getLogger(AzureStorageWagon.class.getName());
46 |
47 | @Override
48 | public void get(String resourceName, File destination) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
49 |
50 | Resource resource = new Resource(resourceName);
51 | transferListenerContainer.fireTransferInitiated(resource, TransferEvent.REQUEST_GET);
52 | transferListenerContainer.fireTransferStarted(resource, TransferEvent.REQUEST_GET, destination);
53 |
54 | final TransferProgress transferProgress = new TransferProgressImpl(resource, TransferEvent.REQUEST_GET, transferListenerContainer);
55 |
56 | try {
57 | azureStorageRepository.copy(resourceName,destination,transferProgress);
58 | transferListenerContainer.fireTransferCompleted(resource,TransferEvent.REQUEST_GET);
59 | } catch (Exception e) {
60 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_GET,e);
61 | throw e;
62 | }
63 | }
64 |
65 | @Override
66 | public boolean getIfNewer(String resourceName, File file, long l) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
67 |
68 | Resource resource = new Resource(resourceName);
69 |
70 | try {
71 | if(azureStorageRepository.newResourceAvailable(resourceName, l)) {
72 | get(resourceName,file);
73 | return true;
74 | }
75 |
76 | return false;
77 | } catch (TransferFailedException| ResourceDoesNotExistException| AuthorizationException e) {
78 | this.transferListenerContainer.fireTransferError(resource, TransferEvent.REQUEST_GET, e);
79 | throw e;
80 | }
81 | }
82 |
83 | @Override
84 | public void put(File file, String resourceName) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
85 | resourceName = Paths.get(resourceName).normalize().toString();
86 | Resource resource = new Resource(resourceName);
87 |
88 | LOGGER.log(Level.FINER, String.format("Uploading file %s to %s", file.getAbsolutePath(), resourceName));
89 |
90 | transferListenerContainer.fireTransferInitiated(resource,TransferEvent.REQUEST_PUT);
91 | transferListenerContainer.fireTransferStarted(resource,TransferEvent.REQUEST_PUT, file);
92 | final TransferProgress transferProgress = new TransferProgressImpl(resource, TransferEvent.REQUEST_PUT, transferListenerContainer);
93 |
94 | try {
95 | azureStorageRepository.put(file, resourceName,transferProgress);
96 | transferListenerContainer.fireTransferCompleted(resource, TransferEvent.REQUEST_PUT);
97 | } catch (TransferFailedException e) {
98 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_PUT,e);
99 | throw e;
100 | }
101 | }
102 |
103 | @Override
104 | public void putDirectory(File source, String destination) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
105 | File[] files = source.listFiles();
106 | if (files != null) {
107 | for (File f : files) {
108 | if (f.isDirectory()) {
109 | putDirectory(f, destination + "/" + f.getName());
110 | } else {
111 | put(f, destination + "/" + f.getName());
112 | }
113 | }
114 | }
115 | }
116 |
117 | @Override
118 | public boolean resourceExists(String resourceName) throws TransferFailedException, AuthorizationException {
119 | try {
120 | return azureStorageRepository.exists(resourceName);
121 | } catch (TransferFailedException e) {
122 | transferListenerContainer.fireTransferError(new Resource(resourceName), TransferEvent.REQUEST_GET, e);
123 | throw e;
124 | }
125 | }
126 |
127 | @Override
128 | public List<String> getFileList(String resourceName) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
129 |
130 | try {
131 | return azureStorageRepository.list(resourceName);
132 | } catch (Exception e) {
133 | transferListenerContainer.fireTransferError(new Resource(resourceName),TransferEvent.REQUEST_GET, e);
134 | throw new TransferFailedException("Could not fetch resource", e);
135 | }
136 | }
137 |
138 | @Override
139 | public void connect(Repository repository, AuthenticationInfo authenticationInfo, ProxyInfoProvider proxyInfoProvider) throws ConnectionException, AuthenticationException {
140 |
141 | this.repository = repository;
142 | this.sessionListenerContainer.fireSessionOpening();
143 |
144 | try {
145 |
146 | final String account = accountResolver.resolve(repository);
147 | final String container = containerResolver.resolve(repository);
148 |
149 | LOGGER.log(Level.FINER,String.format("Opening connection for account %s and container %s",account,container));
150 |
151 | azureStorageRepository = new AzureStorageRepository(container);
152 | azureStorageRepository.connect(authenticationInfo);
153 | sessionListenerContainer.fireSessionLoggedIn();
154 | sessionListenerContainer.fireSessionOpened();
155 | } catch (Exception e) {
156 | this.sessionListenerContainer.fireSessionConnectionRefused();
157 | throw e;
158 | }
159 | }
160 |
161 | @Override
162 | public void disconnect() throws ConnectionException {
163 | sessionListenerContainer.fireSessionDisconnecting();
164 | azureStorageRepository.disconnect();
165 | sessionListenerContainer.fireSessionLoggedOff();
166 | sessionListenerContainer.fireSessionDisconnected();
167 | }
168 |
169 | }
170 |
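171 | /*
172 |  * Illustrative wiring sketch (the coordinates below are assumptions, not taken
173 |  * from this repository's pom files): the wagon is added as a build extension and
174 |  * selected through the repository URL, whose first segment is resolved as the
175 |  * storage account and the second as the container (see connect()).
176 |  *
177 |  *   <build>
178 |  *     <extensions>
179 |  *       <extension>
180 |  *         <groupId>com.gkatzioura.maven.cloud</groupId>
181 |  *         <artifactId>azure-storage-wagon</artifactId>
182 |  *         <version>${cloud.wagon.version}</version>
183 |  *       </extension>
184 |  *     </extensions>
185 |  *   </build>
186 |  */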
--------------------------------------------------------------------------------
/GoogleStorageWagon/src/main/java/com/gkatzioura/maven/cloud/gcs/wagon/GoogleStorageWagon.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.gcs.wagon;
18 |
19 | import java.io.File;
20 | import java.io.FileNotFoundException;
21 | import java.io.IOException;
22 | import java.io.InputStream;
23 | import java.util.List;
24 | import java.util.Optional;
25 | import java.util.logging.Level;
26 | import java.util.logging.Logger;
27 |
28 | import org.apache.maven.wagon.ConnectionException;
29 | import org.apache.maven.wagon.ResourceDoesNotExistException;
30 | import org.apache.maven.wagon.TransferFailedException;
31 | import org.apache.maven.wagon.authentication.AuthenticationException;
32 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
33 | import org.apache.maven.wagon.authorization.AuthorizationException;
34 | import org.apache.maven.wagon.events.TransferEvent;
35 | import org.apache.maven.wagon.proxy.ProxyInfoProvider;
36 | import org.apache.maven.wagon.repository.Repository;
37 | import org.apache.maven.wagon.resource.Resource;
38 |
39 | import com.gkatzioura.maven.cloud.transfer.TransferProgress;
40 | import com.gkatzioura.maven.cloud.transfer.TransferProgressFileInputStream;
41 | import com.gkatzioura.maven.cloud.transfer.TransferProgressImpl;
42 | import com.gkatzioura.maven.cloud.wagon.AbstractStorageWagon;
43 | import com.gkatzioura.maven.cloud.wagon.PublicReadProperty;
44 |
45 | public class GoogleStorageWagon extends AbstractStorageWagon {
46 |
47 | private GoogleStorageRepository googleStorageRepository;
48 | private Optional<String> keyPath = Optional.empty();
49 | private Boolean publicRepository;
50 |
51 | private static final Logger LOGGER = Logger.getLogger(GoogleStorageWagon.class.getName());
52 |
53 | @Override
54 | public void get(String resourceName, File destination) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
55 |
56 | Resource resource = new Resource(resourceName);
57 | transferListenerContainer.fireTransferInitiated(resource, TransferEvent.REQUEST_GET);
58 | transferListenerContainer.fireTransferStarted(resource, TransferEvent.REQUEST_GET, destination);
59 |
60 | try {
61 | googleStorageRepository.copy(resourceName, destination);
62 | transferListenerContainer.fireTransferCompleted(resource,TransferEvent.REQUEST_GET);
63 | } catch (Exception e) {
64 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_GET,e);
65 | throw e;
66 | }
67 | }
68 |
69 | @Override
70 | public boolean getIfNewer(String s, File file, long l) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
71 | if(googleStorageRepository.newResourceAvailable(s, l)) {
72 | get(s,file);
73 | return true;
74 | }
75 |
76 | return false;
77 | }
78 |
79 | @Override
80 | public void put(File file, String resourceName) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
81 |
82 | Resource resource = new Resource(resourceName);
83 |
84 | LOGGER.log(Level.FINER, String.format("Uploading file %s to %s", file.getAbsolutePath(), resourceName));
85 |
86 | transferListenerContainer.fireTransferInitiated(resource,TransferEvent.REQUEST_PUT);
87 | transferListenerContainer.fireTransferStarted(resource,TransferEvent.REQUEST_PUT, file);
88 | final TransferProgress transferProgress = new TransferProgressImpl(resource, TransferEvent.REQUEST_PUT, transferListenerContainer);
89 |
90 | try(InputStream inputStream = new TransferProgressFileInputStream(file, transferProgress)) {
91 | googleStorageRepository.put(inputStream, resourceName);
92 | transferListenerContainer.fireTransferCompleted(resource,TransferEvent.REQUEST_PUT);
93 | } catch (FileNotFoundException e) {
94 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_PUT,e);
95 | throw new ResourceDoesNotExistException("Failed to transfer artifact",e);
96 | } catch (IOException e) {
97 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_PUT,e);
98 | throw new TransferFailedException("Failed to transfer artifact",e);
99 | }
100 | }
101 |
102 | @Override
103 | public void putDirectory(File source, String destination) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
104 | File[] files = source.listFiles();
105 | if (files != null) {
106 | for (File f : files) {
107 | put(f, destination + "/" + f.getName());
108 | }
109 | }
110 | }
111 |
112 | @Override
113 | public boolean resourceExists(String resourceName) throws TransferFailedException, AuthorizationException {
114 |
115 | return googleStorageRepository.exists(resourceName);
116 | }
117 |
118 | @Override
119 | public List<String> getFileList(String resourceName) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
120 | try {
121 | return googleStorageRepository.list(resourceName);
122 | } catch (Exception e) {
123 | transferListenerContainer.fireTransferError(new Resource(resourceName),TransferEvent.REQUEST_GET, e);
124 | throw new TransferFailedException("Could not fetch resource", e);
125 | }
126 | }
127 |
128 | @Override
129 | public void connect(Repository repository, AuthenticationInfo authenticationInfo, ProxyInfoProvider proxyInfoProvider) throws AuthenticationException {
130 | this.repository = repository;
131 | this.sessionListenerContainer.fireSessionOpening();
132 | try {
133 | final String bucket = accountResolver.resolve(repository);
134 | final String directory = containerResolver.resolve(repository);
135 |
136 | LOGGER.log(Level.FINER,String.format("Opening connection for bucket %s and directory %s",bucket,directory));
137 |
138 | googleStorageRepository = new GoogleStorageRepository(keyPath, bucket, directory, new PublicReadProperty(publicRepository));
139 | googleStorageRepository.connect();
140 | sessionListenerContainer.fireSessionLoggedIn();
141 | sessionListenerContainer.fireSessionOpened();
142 | } catch (AuthenticationException e) {
143 | this.sessionListenerContainer.fireSessionConnectionRefused();
144 | throw e;
145 | }
146 | }
147 |
148 | @Override
149 | public void disconnect() throws ConnectionException {
150 | sessionListenerContainer.fireSessionDisconnecting();
151 | googleStorageRepository.disconnect();
152 | sessionListenerContainer.fireSessionLoggedOff();
153 | sessionListenerContainer.fireSessionDisconnected();
154 | }
155 |
156 | public String getKeyPath() {
157 | return keyPath.orElse(null);
158 | }
159 |
160 | public void setKeyPath(String keyPath) {
161 | this.keyPath = Optional.ofNullable(keyPath);
162 | }
163 |
164 | public Boolean getPublicRepository() {
165 | return publicRepository;
166 | }
167 |
168 | public void setPublicRepository(Boolean publicRepository) {
169 | this.publicRepository = publicRepository;
170 | }
171 |
172 | }
173 |
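174 | /*
175 |  * Configuration sketch (illustrative, based only on the setters above): keyPath
176 |  * may point to a service-account JSON key file and publicRepository toggles
177 |  * public-read uploads; both are optional wagon properties, e.g.:
178 |  *
179 |  *   GoogleStorageWagon wagon = new GoogleStorageWagon();
180 |  *   wagon.setKeyPath("/path/to/service-account.json");
181 |  *   wagon.setPublicRepository(Boolean.FALSE);
182 |  */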
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/S3StorageRepository.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.s3;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.io.InputStream;
22 | import java.io.OutputStream;
23 | import java.util.ArrayList;
24 | import java.util.List;
25 | import java.util.logging.Level;
26 | import java.util.logging.Logger;
27 |
28 | import com.gkatzioura.maven.cloud.s3.utils.S3Connect;
29 | import org.apache.commons.io.IOUtils;
30 | import org.apache.maven.wagon.authentication.AuthenticationException;
31 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
32 | import org.apache.maven.wagon.ResourceDoesNotExistException;
33 | import org.apache.maven.wagon.TransferFailedException;
34 |
35 | import com.amazonaws.services.s3.AmazonS3;
36 | import com.amazonaws.services.s3.model.AmazonS3Exception;
37 | import com.amazonaws.services.s3.model.CannedAccessControlList;
38 | import com.amazonaws.services.s3.model.ListObjectsRequest;
39 | import com.amazonaws.services.s3.model.ObjectListing;
40 | import com.amazonaws.services.s3.model.ObjectMetadata;
41 | import com.amazonaws.services.s3.model.PutObjectRequest;
42 | import com.amazonaws.services.s3.model.S3Object;
43 | import com.gkatzioura.maven.cloud.resolver.KeyResolver;
44 | import com.gkatzioura.maven.cloud.transfer.TransferProgress;
45 | import com.gkatzioura.maven.cloud.transfer.TransferProgressFileInputStream;
46 | import com.gkatzioura.maven.cloud.transfer.TransferProgressFileOutputStream;
47 | import com.gkatzioura.maven.cloud.wagon.PublicReadProperty;
48 |
49 | public class S3StorageRepository {
50 |
51 | private final String bucket;
52 | private final String baseDirectory;
53 |
54 | private final KeyResolver keyResolver = new KeyResolver();
55 |
56 | private AmazonS3 amazonS3;
57 | private PublicReadProperty publicReadProperty;
58 |
59 | private static final Logger LOGGER = Logger.getLogger(S3StorageRepository.class.getName());
60 |
61 | public S3StorageRepository(String bucket) {
62 | this.bucket = bucket;
63 | this.baseDirectory = "";
64 | this.publicReadProperty = new PublicReadProperty(false);
65 | }
66 |
67 | public S3StorageRepository(String bucket, PublicReadProperty publicReadProperty) {
68 | this.bucket = bucket;
69 | this.baseDirectory = "";
70 | this.publicReadProperty = publicReadProperty;
71 | }
72 |
73 | public S3StorageRepository(String bucket, String baseDirectory) {
74 | this.bucket = bucket;
75 | this.baseDirectory = baseDirectory;
76 | this.publicReadProperty = new PublicReadProperty(false);
77 | }
78 |
79 | public S3StorageRepository(String bucket, String baseDirectory, PublicReadProperty publicReadProperty) {
80 | this.bucket = bucket;
81 | this.baseDirectory = baseDirectory;
82 | this.publicReadProperty = publicReadProperty;
83 | }
84 |
85 |
86 | public void connect(AuthenticationInfo authenticationInfo, String region, EndpointProperty endpoint, PathStyleEnabledProperty pathStyle) throws AuthenticationException {
87 | this.amazonS3 = S3Connect.connect(authenticationInfo, region, endpoint, pathStyle);
88 | }
89 |
90 | public void copy(String resourceName, File destination, TransferProgress transferProgress) throws TransferFailedException, ResourceDoesNotExistException {
91 |
92 | final String key = resolveKey(resourceName);
93 |
94 | try {
95 |
96 | final S3Object s3Object;
97 | try {
98 | s3Object = amazonS3.getObject(bucket, key);
99 | } catch (AmazonS3Exception e) {
100 | throw new ResourceDoesNotExistException("Resource does not exist: " + key, e);
101 | }
102 | destination.getParentFile().mkdirs();//make sure the folder exists or the outputStream will fail.
103 | try(OutputStream outputStream = new TransferProgressFileOutputStream(destination,transferProgress);
104 | InputStream inputStream = s3Object.getObjectContent()) {
105 | IOUtils.copy(inputStream,outputStream);
106 | }
107 | } catch (AmazonS3Exception |IOException e) {
108 | LOGGER.log(Level.SEVERE,"Could not transfer file", e);
109 | throw new TransferFailedException("Could not download resource "+key);
110 | }
111 | }
112 |
113 | public void put(File file, String destination,TransferProgress transferProgress) throws TransferFailedException {
114 |
115 | final String key = resolveKey(destination);
116 |
117 | try {
118 | try(InputStream inputStream = new TransferProgressFileInputStream(file,transferProgress)) {
119 | PutObjectRequest putObjectRequest = new PutObjectRequest(bucket,key,inputStream,createContentLengthMetadata(file));
120 | applyPublicRead(putObjectRequest);
121 | amazonS3.putObject(putObjectRequest);
122 | }
123 | } catch (AmazonS3Exception | IOException e) {
124 | LOGGER.log(Level.SEVERE,"Could not transfer file ",e);
125 | throw new TransferFailedException("Could not transfer file "+file.getName());
126 | }
127 | }
128 |
129 | private ObjectMetadata createContentLengthMetadata(File file) {
130 | ObjectMetadata metadata = new ObjectMetadata();
131 | metadata.setContentLength(file.length());
132 | return metadata;
133 | }
134 |
135 | public boolean newResourceAvailable(String resourceName,long timeStamp) throws ResourceDoesNotExistException {
136 |
137 | final String key = resolveKey(resourceName);
138 |
139 | LOGGER.log(Level.FINER,String.format("Checking if new key %s exists",key));
140 |
141 | try {
142 | ObjectMetadata objectMetadata = amazonS3.getObjectMetadata(bucket, key);
143 |
144 | long updated = objectMetadata.getLastModified().getTime();
145 | return updated>timeStamp;
146 | } catch (AmazonS3Exception e) {
147 | LOGGER.log(Level.SEVERE,String.format("Could not retrieve %s",key),e);
148 | throw new ResourceDoesNotExistException("Could not retrieve key "+key);
149 | }
150 | }
151 |
152 |
153 | public List<String> list(String path) {
154 |
155 | String key = resolveKey(path);
156 |
157 | ObjectListing objectListing = amazonS3.listObjects(new ListObjectsRequest()
158 | .withBucketName(bucket)
159 | .withPrefix(key));
160 | List<String> objects = new ArrayList<>();
161 | retrieveAllObjects(objectListing, objects);
162 | return objects;
163 | }
164 |
165 | private void applyPublicRead(PutObjectRequest putObjectRequest) {
166 | if(publicReadProperty.get()) {
167 | LOGGER.info("Public read was set to true");
168 | putObjectRequest.withCannedAcl(CannedAccessControlList.PublicRead);
169 | }
170 | }
171 |
172 | private void retrieveAllObjects(ObjectListing objectListing, List<String> objects) {
173 |
174 | objectListing.getObjectSummaries().forEach( os-> objects.add(os.getKey()));
175 |
176 | if(objectListing.isTruncated()) {
177 | ObjectListing nextObjectListing = amazonS3.listNextBatchOfObjects(objectListing);
178 | retrieveAllObjects(nextObjectListing, objects);
179 | }
180 | }
181 |
182 | public boolean exists(String resourceName) {
183 |
184 | final String key = resolveKey(resourceName);
185 |
186 | try {
187 | amazonS3.getObjectMetadata(bucket, key);
188 | return true;
189 | } catch (AmazonS3Exception e) {
190 | return false;
191 | }
192 | }
193 |
194 | public void disconnect() {
195 | amazonS3 = null;
196 | }
197 |
198 | private String resolveKey(String path) {
199 | return keyResolver.resolve(baseDirectory,path);
200 | }
201 |
202 | public String getBucket() {
203 | return bucket;
204 | }
205 |
206 | public String getBaseDirectory() {
207 | return baseDirectory;
208 | }
209 |
210 |
211 | }
212 |
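213 | /*
214 |  * Minimal usage sketch (illustrative only; endpoint and pathStyleEnabled stand
215 |  * for the nullable String fields that S3StorageWagon.connect() passes through):
216 |  *
217 |  *   S3StorageRepository repository = new S3StorageRepository("my-bucket", "release");
218 |  *   repository.connect(authenticationInfo, "us-east-1",
219 |  *           new EndpointProperty(endpoint), new PathStyleEnabledProperty(pathStyleEnabled));
220 |  *   repository.put(artifactFile, "com/example/app/1.0/app-1.0.jar", transferProgress);
221 |  *   boolean present = repository.exists("com/example/app/1.0/app-1.0.jar");
222 |  */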
--------------------------------------------------------------------------------
/S3StorageWagon/src/main/java/com/gkatzioura/maven/cloud/s3/S3StorageWagon.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2018 Emmanouil Gkatziouras
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.gkatzioura.maven.cloud.s3;
18 |
19 | import java.io.File;
20 | import java.util.Collection;
21 | import java.util.HashSet;
22 | import java.util.List;
23 | import java.util.Set;
24 | import java.util.logging.Level;
25 | import java.util.logging.Logger;
26 | import java.util.stream.Collectors;
27 |
28 | import com.gkatzioura.maven.cloud.resolver.KeyResolver;
29 | import org.apache.commons.io.FileUtils;
30 | import org.apache.maven.wagon.ConnectionException;
31 | import org.apache.maven.wagon.PathUtils;
32 | import org.apache.maven.wagon.ResourceDoesNotExistException;
33 | import org.apache.maven.wagon.TransferFailedException;
34 | import org.apache.maven.wagon.authentication.AuthenticationException;
35 | import org.apache.maven.wagon.authentication.AuthenticationInfo;
36 | import org.apache.maven.wagon.authorization.AuthorizationException;
37 | import org.apache.maven.wagon.events.TransferEvent;
38 | import org.apache.maven.wagon.proxy.ProxyInfoProvider;
39 | import org.apache.maven.wagon.repository.Repository;
40 | import org.apache.maven.wagon.resource.Resource;
41 |
42 | import com.amazonaws.services.s3.model.AmazonS3Exception;
43 | import com.gkatzioura.maven.cloud.transfer.TransferProgress;
44 | import com.gkatzioura.maven.cloud.transfer.TransferProgressImpl;
45 | import com.gkatzioura.maven.cloud.wagon.AbstractStorageWagon;
46 | import com.gkatzioura.maven.cloud.wagon.PublicReadProperty;
47 |
48 | public class S3StorageWagon extends AbstractStorageWagon {
49 |
50 | private S3StorageRepository s3StorageRepository;
51 | private final KeyResolver keyResolver = new KeyResolver();
52 |
53 | private String region;
54 | private Boolean publicRepository;
55 |
56 | private static final Logger LOGGER = Logger.getLogger(S3StorageWagon.class.getName());
57 | private String endpoint;
58 | private String pathStyleEnabled;
59 |
60 | @Override
61 | public void get(String resourceName, File file) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
62 |
63 | Resource resource = new Resource(resourceName);
64 | transferListenerContainer.fireTransferInitiated(resource, TransferEvent.REQUEST_GET);
65 | transferListenerContainer.fireTransferStarted(resource, TransferEvent.REQUEST_GET, file);
66 |
67 | final TransferProgress transferProgress = new TransferProgressImpl(resource, TransferEvent.REQUEST_GET, transferListenerContainer);
68 |
69 | try {
70 | s3StorageRepository.copy(resourceName,file,transferProgress);
71 | transferListenerContainer.fireTransferCompleted(resource,TransferEvent.REQUEST_GET);
72 | } catch (Exception e) {
73 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_GET,e);
74 | throw e;
75 | }
76 | }
77 |
78 | @Override
79 | public void put(File file, String resourceName) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
80 |
81 | Resource resource = new Resource(resourceName);
82 |
83 | LOGGER.log(Level.FINER, String.format("Uploading file %s to %s", file.getAbsolutePath(), resourceName));
84 |
85 | transferListenerContainer.fireTransferInitiated(resource,TransferEvent.REQUEST_PUT);
86 | transferListenerContainer.fireTransferStarted(resource,TransferEvent.REQUEST_PUT, file);
87 | final TransferProgress transferProgress = new TransferProgressImpl(resource, TransferEvent.REQUEST_PUT, transferListenerContainer);
88 |
89 | try {
90 | s3StorageRepository.put(file, resourceName,transferProgress);
91 | transferListenerContainer.fireTransferCompleted(resource, TransferEvent.REQUEST_PUT);
92 | } catch (TransferFailedException e) {
93 | transferListenerContainer.fireTransferError(resource,TransferEvent.REQUEST_PUT,e);
94 | throw e;
95 | }
96 | }
97 |
98 | @Override
99 | public boolean getIfNewer(String resourceName, File file, long timeStamp) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
100 |
101 | if(s3StorageRepository.newResourceAvailable(resourceName,timeStamp)) {
102 | get(resourceName,file);
103 | return true;
104 | }
105 |
106 | return false;
107 | }
108 |
109 | @Override
110 | public void putDirectory(File source, String destination) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
111 | Collection<File> allFiles = FileUtils.listFiles(source, null, true);
112 | String relativeDestination = destination;
113 | //removes the initial .
114 | if (destination != null && destination.startsWith(".")){
115 | relativeDestination = destination.length() == 1 ? "" : destination.substring(1);
116 | }
117 | for (File file : allFiles) {
118 | //compute relative path
119 | String relativePath = PathUtils.toRelative(source, file.getAbsolutePath());
120 | put(file, relativeDestination +"/"+relativePath);
121 | }
122 | }
123 |
124 | @Override
125 | public boolean resourceExists(String resourceName) throws TransferFailedException, AuthorizationException {
126 | return s3StorageRepository.exists(resourceName);
127 | }
128 |
129 | @Override
130 | public List<String> getFileList(String s) throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException {
131 | try {
132 | List<String> list = s3StorageRepository.list(s);
133 | list = convertS3ListToMavenFileList(list, s);
134 | if (list.isEmpty()){
135 | throw new ResourceDoesNotExistException(s);//expected by maven
136 | }
137 | return list;
138 | } catch (AmazonS3Exception e) {
139 | throw new TransferFailedException("Could not fetch objects for prefix "+s, e);
140 | }
141 | }
142 |
143 | //removes the prefix path from each object key
144 | //and adds the folder entries Maven expects in listings
145 | private List<String> convertS3ListToMavenFileList(List<String> list, String path) {
146 | String prefix = keyResolver.resolve( s3StorageRepository.getBaseDirectory(), path);
147 | Set<String> folders = new HashSet<>();
148 | List<String> result = list.stream().map( key -> {
149 | String filePath = key;
150 | //removes the prefix from the object path
151 | if (prefix != null && prefix.length() > 0) {
152 | filePath = key.substring(prefix.length() + 1);
153 | }
154 | extractFolders(folders, filePath);
155 | return filePath;
156 | }).collect(Collectors.toList());
157 | result.addAll(folders);
158 | return result;
159 | }
160 |
161 | private void extractFolders(Set<String> folders, String filePath) {
162 | if (filePath.contains("/")){
163 | String folder = filePath.substring(0, filePath.lastIndexOf('/'));
164 | folders.add(folder +'/');
165 | if (folder.contains("/")) {//recurse
166 | extractFolders(folders, folder);
167 | }//else we already stored it.
168 | }else{
169 | folders.add(filePath);
170 | }
171 | }
172 |
173 | @Override
174 | public void connect(Repository repository, AuthenticationInfo authenticationInfo, ProxyInfoProvider proxyInfoProvider) throws ConnectionException, AuthenticationException {
175 |
176 | this.repository = repository;
177 | this.sessionListenerContainer.fireSessionOpening();
178 |
179 | final String bucket = accountResolver.resolve(repository);
180 | final String directory = containerResolver.resolve(repository);
181 |
182 | LOGGER.log(Level.FINER,String.format("Opening connection for bucket %s and directory %s",bucket,directory));
183 | s3StorageRepository = new S3StorageRepository(bucket, directory, new PublicReadProperty(publicRepository));
184 | s3StorageRepository.connect(authenticationInfo, region, new EndpointProperty(endpoint), new PathStyleEnabledProperty(pathStyleEnabled));
185 |
186 | sessionListenerContainer.fireSessionLoggedIn();
187 | sessionListenerContainer.fireSessionOpened();
188 | }
189 |
190 | @Override
191 | public void disconnect() throws ConnectionException {
192 | sessionListenerContainer.fireSessionDisconnecting();
193 | s3StorageRepository.disconnect();
194 | sessionListenerContainer.fireSessionLoggedOff();
195 | sessionListenerContainer.fireSessionDisconnected();
196 | }
197 |
198 | public String getRegion() {
199 | return region;
200 | }
201 |
202 | public void setRegion(String region) {
203 | this.region = region;
204 | }
205 |
206 | public Boolean getPublicRepository() {
207 | return publicRepository;
208 | }
209 |
210 | public void setPublicRepository(Boolean publicRepository) {
211 | this.publicRepository = publicRepository;
212 | }
213 |
214 | public String getEndpoint() {
215 | return endpoint;
216 | }
217 |
218 | public void setEndpoint(String endpoint) {
219 | this.endpoint = endpoint;
220 | }
221 |
222 | public String getPathStyleAccessEnabled() {
223 | return pathStyleEnabled;
224 | }
225 |
226 | public void setPathStyleAccessEnabled(String pathStyleEnabled) {
227 | this.pathStyleEnabled = pathStyleEnabled;
228 | }
229 |
230 | }
231 |
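232 | /*
233 |  * Deployment sketch (illustrative; it assumes this wagon is registered for the
234 |  * s3:// URL scheme, with the host part resolved as the bucket and the path as
235 |  * the base directory, see connect()):
236 |  *
237 |  *   <distributionManagement>
238 |  *     <snapshotRepository>
239 |  *       <id>my-repo-snapshots</id>
240 |  *       <url>s3://my-bucket/snapshot</url>
241 |  *     </snapshotRepository>
242 |  *     <repository>
243 |  *       <id>my-repo-releases</id>
244 |  *       <url>s3://my-bucket/release</url>
245 |  *     </repository>
246 |  *   </distributionManagement>
247 |  */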
--------------------------------------------------------------------------------