├── source
├── S3Sync
│ ├── .dockerignore
│ ├── Dockerfile
│ ├── S3Sync.csproj
│ └── Program.cs
├── S3Sync.BenchmarkCore
│ ├── app.debug.setting
│ ├── app.release.setting
│ ├── S3Sync.BenchmarkCore.csproj
│ ├── README.md
│ └── Program.cs
├── docker-compose.yml
├── docker-compose.ci.build.yml
├── S3Sync.Core
│ ├── S3ClientOption.cs
│ ├── Enums
│ │ └── FileSyncStatus.cs
│ ├── S3Sync.Core.csproj
│ ├── Extentions
│ │ ├── DoubleExtensions.cs
│ │ ├── StringExtentions.cs
│ │ ├── EnumerableExtentions.cs
│ │ └── S3ClientExtensions.cs
│ ├── LocalFiles
│ │ ├── SlimFileInfo.cs
│ │ ├── EnumerableFileSystem.cs
│ │ └── FileHashHelper.cs
│ ├── Diagnostics
│ │ └── ExponentialBackoff.cs
│ ├── S3FileHashStatus.cs
│ ├── AmazonCredential.cs
│ ├── SynchronizationResult.cs
│ └── S3Client.cs
└── S3Sync.sln
├── targzip_netcore.sh
├── zip_fullnet.ps1
├── LICENSE.md
├── .gitignore
└── README.md
/source/S3Sync/.dockerignore:
--------------------------------------------------------------------------------
1 | *
2 | !obj/Docker/publish/*
3 | !obj/Docker/empty/
4 |
--------------------------------------------------------------------------------
/targzip_netcore.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | cd `dirname $0`
3 | cd source/S3Sync/obj/Docker/publish/
4 | tar zcvf ../s3sync_netcore.tar.gz *
--------------------------------------------------------------------------------
/zip_fullnet.ps1:
--------------------------------------------------------------------------------
1 | Compress-Archive -Path "source/S3Sync/bin/Release/net47/*" -DestinationPath "source\S3Sync\obj\Docker\s3sync_netfull.zip" -Force
--------------------------------------------------------------------------------
/source/S3Sync.BenchmarkCore/app.debug.setting:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/source/S3Sync.BenchmarkCore/app.release.setting:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/source/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | s3sync:
5 | image: guitarrapc/s3sync
6 | build:
7 | context: ./S3Sync
8 | dockerfile: Dockerfile
9 |
--------------------------------------------------------------------------------
/source/S3Sync/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM microsoft/dotnet:2.0-runtime
2 | ARG source
3 | WORKDIR /app
4 | ENV S3Sync_LocalRoot=/app/sync
5 | COPY ${source:-obj/Docker/publish} .
6 | CMD ["dotnet", "S3Sync.dll"]
7 |
--------------------------------------------------------------------------------
/source/docker-compose.ci.build.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | ci-build:
5 | image: microsoft/dotnet:2.0-sdk
6 | volumes:
7 | - .:/src
8 | working_dir: /src
9 | command: /bin/bash -c "dotnet restore ./S3Sync.sln && dotnet publish ./S3Sync/S3Sync.csproj -c Release -o ./obj/Docker/publish -f netcoreapp2.0"
10 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/S3ClientOption.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Text;
4 |
5 | namespace S3Sync.Core
6 | {
7 | public class S3ClientOption
8 | {
9 | public bool DryRun { get; set; } = true;
10 | public string ContentType { get; set; }
11 | public string Region { get; set; }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Enums/FileSyncStatus.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Text;
4 |
5 | namespace S3Sync.Core
6 | {
7 | ///
8 | /// File Synchronization Status
9 | ///
10 | public enum FileSyncStatus
11 | {
12 | Undefined = 0,
13 | Sync,
14 | DiffExists,
15 | LocalOnly,
16 | RemoteOnly,
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/S3Sync.Core.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | netcoreapp2.0;net47
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Extentions/DoubleExtensions.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Text;
4 |
5 | namespace S3Sync.Core
6 | {
7 | public static class DoubleExtensions
8 | {
9 | public static double ToRound(this double number, int effectiveDigit = 0)
10 | {
11 | var pow = Math.Pow(10, effectiveDigit);
12 | var result = number > 0
13 | ? Math.Floor((number * pow) + 0.5) / pow
14 | : Math.Ceiling((number * pow) - 0.5) / pow;
15 | return result;
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/source/S3Sync/S3Sync.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 |
6 | netcoreapp2.0;net47
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | Dockerfile
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Extentions/StringExtentions.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Text;
4 |
5 | namespace S3Sync.Core
6 | {
7 | public static class StringExtensions
8 | {
9 | ///
10 | /// Concat string arrays into single string.
11 | ///
12 | ///
13 | ///
14 | ///
15 | ///
16 | public static string ToJoinedString(this IEnumerable source, string separator = "")
17 | {
18 | return String.Join(separator, source);
19 | }
20 |
21 | public static string[] SplitEx(this string input, string separator)
22 | {
23 | #if NETCOREAPP2_0
24 | return input.Split(separator, StringSplitOptions.RemoveEmptyEntries);
25 | #else
26 | return input.Split(new[] { separator }, StringSplitOptions.RemoveEmptyEntries);
27 | #endif
28 | }
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/source/S3Sync.BenchmarkCore/S3Sync.BenchmarkCore.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 |
6 | netcoreapp2.0;net47
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 | Always
23 |
24 |
25 | Always
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 |
2 | The MIT License (MIT)
3 |
4 | Copyright (c) 2017 guitarrapc
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Extentions/EnumerableExtentions.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 |
4 | namespace S3Sync.Core.Extentions
5 | {
6 | public static class EnumerableExtentions
7 | {
8 | public static IEnumerable Buffer(this IEnumerable source, int count)
9 | {
10 | if (source == null) throw new ArgumentNullException("source");
11 | if (count <= 0) throw new ArgumentOutOfRangeException("count");
12 |
13 | return BufferCore(source, count);
14 | }
15 |
16 | static IEnumerable BufferCore(this IEnumerable source, int count)
17 | {
18 | var buffer = new T[count];
19 | var index = 0;
20 | foreach (var item in source)
21 | {
22 | buffer[index++] = item;
23 | if (index == count)
24 | {
25 | yield return buffer;
26 | index = 0;
27 | buffer = new T[count];
28 | }
29 | }
30 |
31 | if (index != 0)
32 | {
33 | var dest = new T[index];
34 | Array.Copy(buffer, dest, index);
35 | yield return dest;
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/LocalFiles/SlimFileInfo.cs:
--------------------------------------------------------------------------------
1 | using System.IO;
2 | using System.Linq;
3 |
4 | namespace S3Sync.Core.LocalFiles
5 | {
6 | public struct SlimFileInfo
7 | {
8 | public string FullPath { get; private set; }
9 | public string DirectoryName { get; private set; }
10 | public string FileName { get; private set; }
11 | public string RelativePath { get; private set; }
12 | public string MultiplatformRelativePath { get; private set; }
13 | public string RelativeDirectory { get; private set; }
14 | public string MultiplatformRelativeDirectory { get; private set; }
15 |
16 | public SlimFileInfo(string fullPath, string basePath)
17 | {
18 | FullPath = fullPath;
19 | DirectoryName = Path.GetDirectoryName(fullPath);
20 | FileName = Path.GetFileName(fullPath);
21 |
22 | var tempRelativePath = fullPath.Replace(basePath, "");
23 | RelativePath = tempRelativePath.First() != '\\' ? tempRelativePath : tempRelativePath.Substring(1, tempRelativePath.Length - 1);
24 | MultiplatformRelativePath = RelativePath.Replace(@"\", "/");
25 |
26 | var tempRelativeDirectory = RelativePath.Replace(FileName, "");
27 | if (string.IsNullOrEmpty(tempRelativeDirectory))
28 | {
29 | RelativeDirectory = string.Empty;
30 | MultiplatformRelativeDirectory = string.Empty;
31 | }
32 | else
33 | {
34 | RelativeDirectory = tempRelativeDirectory.Last() != '\\' ? tempRelativeDirectory : tempRelativeDirectory.Substring(0, tempRelativeDirectory.Length - 1);
35 | MultiplatformRelativeDirectory = RelativeDirectory.Replace(@"\", "/");
36 | }
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Diagnostics/ExponentialBackoff.cs:
--------------------------------------------------------------------------------
1 | using Amazon.Runtime;
2 | using Amazon.Runtime.CredentialManagement;
3 | using Amazon.Util;
4 | using System;
5 |
6 | namespace S3Sync.Core.Diagnostics
7 | {
8 | public class ExponentialBackoff
9 | {
10 | readonly Random random;
11 | readonly double minBackoffMilliseconds;
12 | readonly double maxBackoffMilliseconds;
13 | readonly double deltaBackoffMilliseconds;
14 |
15 | int currentPower;
16 |
17 | public ExponentialBackoff(TimeSpan minBackoff, TimeSpan maxBackoff, TimeSpan deltaBackoff)
18 | {
19 | random = new Random();
20 | minBackoffMilliseconds = minBackoff.TotalMilliseconds;
21 | maxBackoffMilliseconds = maxBackoff.TotalMilliseconds;
22 | deltaBackoffMilliseconds = deltaBackoff.TotalMilliseconds;
23 | }
24 |
25 | public TimeSpan GetNextDelay()
26 | {
27 | int delta = (int)((System.Math.Pow(2.0, currentPower) - 1.0) * random.Next((int)(deltaBackoffMilliseconds * 0.8), (int)(deltaBackoffMilliseconds * 1.2)));
28 | int interval = (int)System.Math.Min(checked(minBackoffMilliseconds + delta), maxBackoffMilliseconds);
29 |
30 | if (interval < maxBackoffMilliseconds)
31 | {
32 | currentPower++;
33 | }
34 |
35 | return TimeSpan.FromMilliseconds(interval);
36 | }
37 |
38 | public static class Preset
39 | {
40 | ///
41 | /// Preset for AWS Retry : 00:00:01, 00:00:03, 00:00:07, 00:00:15, 00:00:30...
42 | ///
43 | ///
44 | public static ExponentialBackoff AwsOperation()
45 | {
46 | return new ExponentialBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(2));
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/LocalFiles/EnumerableFileSystem.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 |
6 | namespace S3Sync.Core.LocalFiles
7 | {
8 | public class EnumerableFileSystem
9 | {
10 | public IEnumerable Files { get; private set; }
11 | public string[] ExcludeFiles { get; private set; }
12 | public string[] ExcludeDirectories { get; private set; }
13 |
14 | public EnumerableFileSystem()
15 | {
16 | }
17 |
18 | public EnumerableFileSystem(string[] excludeDirectories = null, string[] excludeFiles = null)
19 | {
20 | ExcludeDirectories = excludeDirectories;
21 | ExcludeFiles = excludeFiles;
22 | }
23 |
24 | public IEnumerable EnumerateFiles(string filePath, string pattern = "*", SearchOption option = SearchOption.AllDirectories)
25 | {
26 | Files = Directory.EnumerateFiles(filePath, pattern, option)
27 | .Select(x => new SlimFileInfo(x, filePath));
28 |
29 | // Exclude files under specified directories (respect directory structure)
30 | if (ExcludeDirectories != null && ExcludeDirectories.Any())
31 | {
32 | Files = Files.Where(x => !ExcludeDirectories.Any(y => x.MultiplatformRelativeDirectory.StartsWith(y)));
33 | }
34 |
35 | // Exclude specified name files (Just matching filename, ignore directory structure.)
36 | if (ExcludeFiles != null && ExcludeFiles.Any())
37 | {
38 | Files = Files.Where(x => !ExcludeFiles.Contains(x.FileName));
39 | }
40 | return Files;
41 | }
42 |
43 | public void Save()
44 | {
45 | // TODO : Save enumerate result cache to file
46 | throw new NotImplementedException();
47 | }
48 |
49 | public void Load()
50 | {
51 | // TODO : read enumerate result cache from file
52 | throw new NotImplementedException();
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/S3FileHashStatus.cs:
--------------------------------------------------------------------------------
1 | using Amazon.S3.Model;
2 | using S3Sync.Core.LocalFiles;
3 |
4 | namespace S3Sync.Core
5 | {
6 | public struct S3FileHashStatus
7 | {
8 | public FileSyncStatus FileSyncStatus { get; }
9 | public bool IsFileMatchS3ETag { get; }
10 | public SlimFileInfo? FileInfo { get; }
11 | public string FileHashHexString { get; }
12 | public int ChunkSize { get; }
13 | public S3Object S3Object { get; }
14 | public string S3Etag { get; }
15 |
16 | public S3FileHashStatus(SlimFileInfo? fileInfo, string fileHashHexString, int chunkSize, S3Object s3Object)
17 | {
18 | FileInfo = fileInfo;
19 | FileHashHexString = fileHashHexString;
20 | ChunkSize = chunkSize;
21 | S3Object = s3Object;
22 | S3Etag = S3Object?.GetETag();
23 | IsFileMatchS3ETag = FileHashHexString == S3Etag;
24 |
25 | // Local : Not exists.
26 | // S3 : Exists
27 | if (!FileInfo.HasValue && S3Object != null)
28 | {
29 | FileSyncStatus = FileSyncStatus.RemoteOnly;
30 | }
31 | // Local : Exists
32 | // S3 : Not exists
33 | else if (FileInfo.HasValue && S3Object == null)
34 | {
35 | FileSyncStatus = FileSyncStatus.LocalOnly;
36 | }
37 | // Unmatch Calculated Etag and S3Object ETag.
38 | // Possible : Rewritten on Remote OR rewritten on Local
39 | else if (!IsFileMatchS3ETag)
40 | {
41 | FileSyncStatus = FileSyncStatus.DiffExists;
42 | }
43 | // Match Calculated Etag and S3Object ETag.
44 | else if (IsFileMatchS3ETag)
45 | {
46 | FileSyncStatus = FileSyncStatus.Sync;
47 | }
48 | // Status is invalid. Not expected at all. (May be new AWS Implementation?)
49 | else
50 | {
51 | FileSyncStatus = FileSyncStatus.Undefined;
52 | }
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/AmazonCredential.cs:
--------------------------------------------------------------------------------
1 | using Amazon.Runtime;
2 | using Amazon.Runtime.CredentialManagement;
3 | using Amazon.Util;
4 | using System;
5 |
6 | namespace S3Sync.Core
7 | {
8 | ///
9 | /// Best practice. Run with IAM Role. like IAM Instance Profile.
10 | /// ----------------------------------
11 | /// var s3 = new S3Client();
12 | ///
13 | /// Local run, using AccessKey, AccessSecret in Environment Variable.
14 | /// ----------------------------------
15 | AWS_ACCESS_KEY_ID="YOUR_ACCESS_KEY"
16 | /// AWS_SECRET_ACCESS_KEY="YOUR_ACCESS_SECRET"
17 | /// var s3 = new S3Client();
18 | ///
19 | /// Local run, using AWS CredentialProfile.
20 | /// ----------------------------------
21 | /// var s3 = new S3Client(AmazonCredential.GetCredential("Credential_Profile_Name"));
22 | ///
23 | /// Not recommend. Use AccessKey.
24 | /// ----------------------------------
25 | /// AmazonCredential.RegisterProfile("Credential_Profile_Name", "accessKey", "accessSecret");
26 | /// var s3 = new S3Client(AmazonCredential.GetCredential("Credential_Profile_Name"));
27 | ///
28 | public static class AmazonCredential
29 | {
30 | public static AWSCredentials GetCredential(string profileName)
31 | {
32 | var chain = new CredentialProfileStoreChain();
33 | if (chain.TryGetProfile(profileName, out var profile) && chain.TryGetAWSCredentials(profileName, out var credentials))
34 | {
35 | return credentials;
36 | }
37 | throw new NullReferenceException($"{nameof(profileName)} not found from existing profile list. Make sure you have set Profile");
38 | }
39 |
40 | public static void RegisterProfile(string profileName, string accessKey, string accessSecret)
41 | {
42 | var option = new CredentialProfileOptions
43 | {
44 | AccessKey = accessKey,
45 | SecretKey = accessSecret
46 | };
47 | new NetSDKCredentialsFile().RegisterProfile(new CredentialProfile(profileName, option));
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/Extentions/S3ClientExtensions.cs:
--------------------------------------------------------------------------------
1 | using Amazon.S3.Model;
2 | using System;
3 | using System.Collections.Generic;
4 | using System.Linq;
5 |
6 | namespace S3Sync.Core
7 | {
8 | public static class S3ClientExtensions
9 | {
10 | ///
11 | /// Convert to Enumerable S3Object
12 | ///
13 | ///
14 | ///
15 | public static IEnumerable ToS3Objects(this IEnumerable source)
16 | {
17 | return source.SelectMany(x => x.S3Objects);
18 | }
19 |
20 | ///
21 | /// Pick up raw ETag string without "" defined in RFC
22 | ///
23 | ///
24 | ///
25 | public static string GetEtag(this GetObjectResponse source)
26 | {
27 | if (source == null) throw new ArgumentNullException();
28 | return source.ETag.Replace("\"", "");
29 | }
30 |
31 | ///
32 | /// Pick up raw ETag string without "" defined in RFC
33 | ///
34 | ///
35 | ///
36 | public static string GetETag(this S3Object source)
37 | {
38 | if (source == null) throw new ArgumentNullException();
39 | return source.ETag.Replace("\"", "");
40 | }
41 |
42 | ///
43 | /// Pick up Chunk count when Multipart ETag.
44 | ///
45 | ///
46 | ///
47 | public static int GetETagChunkCount(this S3Object source)
48 | {
49 | if (source == null) throw new ArgumentNullException();
50 |
51 | var eTag = source.GetETag();
52 | if (eTag.Length == 32) return 0;
53 | return int.Parse(eTag.Split('-')[1]);
54 | }
55 |
56 | ///
57 | /// Return S3Object except KeyPrefix is startswith specified.
58 | ///
59 | ///
60 | ///
61 | ///
62 | public static IEnumerable IgnorePrefix(this IEnumerable source, string keyPrefix)
63 | {
64 | return source.Where(x => !x.Key.StartsWith(keyPrefix));
65 | }
66 | }
67 | }
68 |
--------------------------------------------------------------------------------
/source/S3Sync.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio 15
4 | VisualStudioVersion = 15.0.26730.15
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "S3Sync.Core", "S3Sync.Core\S3Sync.Core.csproj", "{8024CC01-FC60-4427-A4C2-FFAAB016609A}"
7 | EndProject
8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "S3Sync.BenchmarkCore", "S3Sync.BenchmarkCore\S3Sync.BenchmarkCore.csproj", "{722F8FE8-6EAE-483F-AA53-14ED4742F488}"
9 | EndProject
10 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Docs", "Docs", "{7E5B2E7C-C960-4575-AAA2-F7CADD71FF84}"
11 | ProjectSection(SolutionItems) = preProject
12 | ..\README.md = ..\README.md
13 | EndProjectSection
14 | EndProject
15 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "S3Sync", "S3Sync\S3Sync.csproj", "{88ABA00B-B833-44C5-AE3B-CC727738EEC4}"
16 | EndProject
17 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Docker", "Docker", "{81E5430B-314C-4531-8BD6-D5311A18FC6E}"
18 | ProjectSection(SolutionItems) = preProject
19 | docker-compose.ci.build.yml = docker-compose.ci.build.yml
20 | docker-compose.yml = docker-compose.yml
21 | EndProjectSection
22 | EndProject
23 | Global
24 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
25 | Debug|Any CPU = Debug|Any CPU
26 | Release|Any CPU = Release|Any CPU
27 | EndGlobalSection
28 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
29 | {8024CC01-FC60-4427-A4C2-FFAAB016609A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
30 | {8024CC01-FC60-4427-A4C2-FFAAB016609A}.Debug|Any CPU.Build.0 = Debug|Any CPU
31 | {8024CC01-FC60-4427-A4C2-FFAAB016609A}.Release|Any CPU.ActiveCfg = Release|Any CPU
32 | {8024CC01-FC60-4427-A4C2-FFAAB016609A}.Release|Any CPU.Build.0 = Release|Any CPU
33 | {722F8FE8-6EAE-483F-AA53-14ED4742F488}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
34 | {722F8FE8-6EAE-483F-AA53-14ED4742F488}.Debug|Any CPU.Build.0 = Debug|Any CPU
35 | {722F8FE8-6EAE-483F-AA53-14ED4742F488}.Release|Any CPU.ActiveCfg = Release|Any CPU
36 | {722F8FE8-6EAE-483F-AA53-14ED4742F488}.Release|Any CPU.Build.0 = Release|Any CPU
37 | {88ABA00B-B833-44C5-AE3B-CC727738EEC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
38 | {88ABA00B-B833-44C5-AE3B-CC727738EEC4}.Debug|Any CPU.Build.0 = Debug|Any CPU
39 | {88ABA00B-B833-44C5-AE3B-CC727738EEC4}.Release|Any CPU.ActiveCfg = Release|Any CPU
40 | {88ABA00B-B833-44C5-AE3B-CC727738EEC4}.Release|Any CPU.Build.0 = Release|Any CPU
41 | EndGlobalSection
42 | GlobalSection(SolutionProperties) = preSolution
43 | HideSolutionNode = FALSE
44 | EndGlobalSection
45 | GlobalSection(ExtensibilityGlobals) = postSolution
46 | SolutionGuid = {B82EF28F-0C55-4660-8E07-32E14925AF8D}
47 | EndGlobalSection
48 | EndGlobal
49 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/SynchronizationResult.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.Linq;
4 | using System.Text;
5 |
6 | namespace S3Sync.Core
7 | {
8 | public class SynchronizationResult
9 | {
10 | public int TotalCount { get { return New + Update + Skip + Remove; } }
11 | public int New { get; set; }
12 | public int Update { get; set; }
13 | public int Skip { get; set; }
14 | public int Remove { get; set; }
15 | public bool DryRun { get; set; }
16 |
17 | ///
18 | /// Markdown Friendly Table format
19 | ///
20 | ///
21 | public string ToMarkdown()
22 | {
23 | var totalItem = new MarkDownTabkeItem(TotalCount, nameof(TotalCount));
24 | var newItem = new MarkDownTabkeItem(New, nameof(New));
25 | var updateItem = new MarkDownTabkeItem(Update, nameof(Update));
26 | var skipItem = new MarkDownTabkeItem(Skip, nameof(Skip));
27 | var removeItem = new MarkDownTabkeItem(Remove, nameof(Remove));
28 | var isDryItem = new MarkDownTabkeItem(DryRun, nameof(DryRun));
29 |
30 | return $@"| {totalItem.Title} | {newItem.Title} | {updateItem.Title} | {skipItem.Title} | {removeItem.Title} | {isDryItem.Title} |
31 | | {totalItem.Separator}: | {newItem.Separator}: | {updateItem.Separator}: | {skipItem.Separator}: | {removeItem.Separator}: | {isDryItem.Separator}: |
32 | | {totalItem.Value} | {newItem.Value} | {updateItem.Value} | {skipItem.Value} | {removeItem.Value} | {isDryItem.Value} |";
33 | }
34 |
35 | private class MarkDownTabkeItem
36 | {
37 | public string Separator { get; set; }
38 | public string Title { get; set; }
39 | public string Value { get; set; }
40 |
41 | public MarkDownTabkeItem(int count, string name)
42 | {
43 | var max = Math.Max(name.Length, count.ToString().Length);
44 | Separator = new string('-', max);
45 | Title = PaddingTitle(max, name);
46 | Value = PaddingTitle(max, count.ToString());
47 | }
48 |
49 | public MarkDownTabkeItem(bool state, string name)
50 | {
51 | var max = Math.Max(name.Length, state.ToString().Length);
52 | Separator = new string('-', max);
53 | Title = PaddingTitle(max, name);
54 | Value = PaddingTitle(max, state.ToString());
55 | }
56 |
57 | private int GetMax(string left, int right)
58 | {
59 | return Math.Max(left.Length, right.ToString().Length);
60 | }
61 |
62 | private string PaddingTitle(int max, string text)
63 | {
64 | return string.Format($"{{0, {max}}}", text);
65 | }
66 | }
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/source/S3Sync.BenchmarkCore/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Benchmarks
3 |
4 | ## Benchmark (55 items)
5 |
6 | | Method | Runtime | Toolchain | Mean | Error | Scaled | Gen 0 | Gen 1 | Gen 2 | Allocated |
7 | |----------------------------------------- |-------- |----------- |-----------:|------:|-------:|----------:|----------:|----------:|------------:|
8 | | ConcurrentDirectoryUploadPartsize16CpuX1 | Clr | Default | 2,852.9 ms | NA | 1.00 | 5187.5000 | 5187.5000 | 5187.5000 | 25236.25 KB |
9 | | ConcurrentDirectoryUploadPartsize16CpuX2 | Clr | Default | 3,011.1 ms | NA | 1.06 | 5187.5000 | 5187.5000 | 5187.5000 | 25236.75 KB |
10 | | ConcurrentDirectoryUploadPartsize5CpuX1 | Clr | Default | 2,704.7 ms | NA | 0.95 | 5187.5000 | 5187.5000 | 5187.5000 | 25232.25 KB |
11 | | ConcurretFileUploadPartsize16CpuX1 | Clr | Default | 454.4 ms | NA | 0.16 | 4437.5000 | 3625.0000 | 2937.5000 | 25509.56 KB |
12 | | ConcurretFileUploadPartsize16CpuX2 | Clr | Default | 461.0 ms | NA | 0.16 | 4687.5000 | 3750.0000 | 3062.5000 | 25526.78 KB |
13 | | ConcurretFileUploadPartsize5CpuX1 | Clr | Default | 359.8 ms | NA | 0.13 | 4625.0000 | 3750.0000 | 3062.5000 | 25617.95 KB |
14 | | ConcurrentDirectoryUploadPartsize16CpuX1 | Core | CoreCsProj | 3,446.6 ms | NA | 1.00 | 5187.5000 | 5187.5000 | 5187.5000 | 1.05 KB |
15 | | ConcurrentDirectoryUploadPartsize16CpuX2 | Core | CoreCsProj | 3,521.6 ms | NA | 1.02 | 5187.5000 | 5187.5000 | 5187.5000 | 1.05 KB |
16 | | ConcurrentDirectoryUploadPartsize5CpuX1 | Core | CoreCsProj | 2,773.9 ms | NA | 0.80 | 5187.5000 | 5187.5000 | 5187.5000 | 1.05 KB |
17 | | ConcurretFileUploadPartsize16CpuX1 | Core | CoreCsProj | 409.0 ms | NA | 0.12 | 2812.5000 | 2125.0000 | 1562.5000 | 20085.78 KB |
18 | | ConcurretFileUploadPartsize16CpuX2 | Core | CoreCsProj | 411.3 ms | NA | 0.12 | 3000.0000 | 2375.0000 | 1750.0000 | 20086.08 KB |
19 | | ConcurretFileUploadPartsize5CpuX1 | Core | CoreCsProj | 256.4 ms | NA | 0.07 | 2875.0000 | 2312.5000 | 1687.5000 | 20085.62 KB |
20 |
21 |
22 | ## Benchmark 2 (55 items)
23 |
24 |
25 | | Method | Runtime | Toolchain | Mean | Error | Scaled | Gen 0 | Gen 1 | Gen 2 | Allocated |
26 | |----------------------------------- |-------- |----------- |---------:|------:|-------:|----------:|----------:|----------:|----------:|
27 | | ConcurretFileUploadPartsize16CpuX1 | Clr | Default | 198.8 ms | NA | 1.00 | 4375.0000 | 3687.5000 | 2750.0000 | 24.94 MB |
28 | | ConcurretFileUploadPartsize16CpuX2 | Clr | Default | 241.5 ms | NA | 1.21 | 4250.0000 | 3500.0000 | 2687.5000 | 24.89 MB |
29 | | ConcurretFileUploadPartsize5CpuX1 | Clr | Default | 240.4 ms | NA | 1.21 | 4437.5000 | 3500.0000 | 2750.0000 | 24.83 MB |
30 | | ConcurretFileUploadPartsize16CpuX1 | Core | CoreCsProj | 141.5 ms | NA | 1.00 | 3375.0000 | 2812.5000 | 2125.0000 | 19.62 MB |
31 | | ConcurretFileUploadPartsize16CpuX2 | Core | CoreCsProj | 225.3 ms | NA | 1.59 | 2812.5000 | 2250.0000 | 1625.0000 | 19.62 MB |
32 | | ConcurretFileUploadPartsize5CpuX1 | Core | CoreCsProj | 119.3 ms | NA | 0.84 | 3312.5000 | 2687.5000 | 2125.0000 | 19.62 MB |
33 |
34 |
35 | ## Benchmark3 (2922 items)
36 |
37 | | Method | Runtime | Toolchain | Mean | Error | Scaled | Gen 0 | Gen 1 | Gen 2 | Allocated |
38 | |----------------------------------- |-------- |----------- |--------:|------:|-------:|------------:|------------:|-----------:|-----------:|
39 | | ConcurretFileUploadPartsize16CpuX1 | Clr | Default | 3.293 s | NA | 1.00 | 153125.0000 | 89437.5000 | 76375.0000 | 1124.4 MB |
40 | | ConcurretFileUploadPartsize16CpuX2 | Clr | Default | 3.792 s | NA | 1.15 | 162312.5000 | 92250.0000 | 79312.5000 | 1125.6 MB |
41 | | ConcurretFileUploadPartsize5CpuX1 | Clr | Default | 3.751 s | NA | 1.14 | 165875.0000 | 102312.5000 | 90312.5000 | 1128.92 MB |
42 | | ConcurretFileUploadPartsize16CpuX1 | Core | CoreCsProj | 9.403 s | NA | 1.00 | 134250.0000 | 105937.5000 | 82625.0000 | 908.25 MB |
43 | | ConcurretFileUploadPartsize16CpuX2 | Core | CoreCsProj | 5.404 s | NA | 0.57 | 151687.5000 | 88500.0000 | 77375.0000 | 908.25 MB |
44 | | ConcurretFileUploadPartsize5CpuX1 | Core | CoreCsProj | 5.638 s | NA | 0.60 | 130437.5000 | 98500.0000 | 79250.0000 | 908.24 MB |
45 |
46 |
47 | ## Benchmark4 (5725 items)
48 |
49 | | Method | Runtime | Toolchain | Mean | Error | Scaled | Gen 0 | Gen 1 | Gen 2 | Allocated |
50 | |----------------------------------- |-------- |----------- |--------:|------:|-------:|------------:|------------:|------------:|----------:|
51 | | ConcurretFileUploadPartsize16CpuX1 | Clr | Default | 6.565 s | NA | 1.00 | 268250.0000 | 144687.5000 | 115625.0000 | 2.16 GB |
52 | | ConcurretFileUploadPartsize16CpuX2 | Clr | Default | 5.666 s | NA | 0.86 | 265187.5000 | 141875.0000 | 113437.5000 | 2.15 GB |
53 | | ConcurretFileUploadPartsize5CpuX1 | Clr | Default | 5.990 s | NA | 0.91 | 276125.0000 | 145437.5000 | 116875.0000 | 2.16 GB |
54 | | ConcurretFileUploadPartsize16CpuX1 | Core | CoreCsProj | 3.448 s | NA | 1.00 | 296312.5000 | 257125.0000 | 194500.0000 | 1.74 GB |
55 | | ConcurretFileUploadPartsize16CpuX2 | Core | CoreCsProj | 3.436 s | NA | 1.00 | 285125.0000 | 243875.0000 | 184062.5000 | 1.74 GB |
56 | | ConcurretFileUploadPartsize5CpuX1 | Core | CoreCsProj | 3.535 s | NA | 1.03 | 289125.0000 | 247562.5000 | 187687.5000 | 1.74 GB |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.suo
8 | *.user
9 | *.userosscache
10 | *.sln.docstates
11 |
12 | # User-specific files (MonoDevelop/Xamarin Studio)
13 | *.userprefs
14 |
15 | # Build results
16 | [Dd]ebug/
17 | [Dd]ebugPublic/
18 | [Rr]elease/
19 | [Rr]eleases/
20 | x64/
21 | x86/
22 | bld/
23 | [Bb]in/
24 | [Oo]bj/
25 | [Ll]og/
26 |
27 | # Visual Studio 2015 cache/options directory
28 | .vs/
29 | # Uncomment if you have tasks that create the project's static files in wwwroot
30 | #wwwroot/
31 |
32 | # MSTest test Results
33 | [Tt]est[Rr]esult*/
34 | [Bb]uild[Ll]og.*
35 |
36 | # NUNIT
37 | *.VisualState.xml
38 | TestResult.xml
39 |
40 | # Build Results of an ATL Project
41 | [Dd]ebugPS/
42 | [Rr]eleasePS/
43 | dlldata.c
44 |
45 | # Benchmark Results
46 | BenchmarkDotNet.Artifacts/
47 |
48 | # .NET Core
49 | project.lock.json
50 | project.fragment.lock.json
51 | artifacts/
52 | **/Properties/launchSettings.json
53 |
54 | *_i.c
55 | *_p.c
56 | *_i.h
57 | *.ilk
58 | *.meta
59 | *.obj
60 | *.pch
61 | *.pdb
62 | *.pgc
63 | *.pgd
64 | *.rsp
65 | *.sbr
66 | *.tlb
67 | *.tli
68 | *.tlh
69 | *.tmp
70 | *.tmp_proj
71 | *.log
72 | *.vspscc
73 | *.vssscc
74 | .builds
75 | *.pidb
76 | *.svclog
77 | *.scc
78 |
79 | # Chutzpah Test files
80 | _Chutzpah*
81 |
82 | # Visual C++ cache files
83 | ipch/
84 | *.aps
85 | *.ncb
86 | *.opendb
87 | *.opensdf
88 | *.sdf
89 | *.cachefile
90 | *.VC.db
91 | *.VC.VC.opendb
92 |
93 | # Visual Studio profiler
94 | *.psess
95 | *.vsp
96 | *.vspx
97 | *.sap
98 |
99 | # Visual Studio Trace Files
100 | *.e2e
101 |
102 | # TFS 2012 Local Workspace
103 | $tf/
104 |
105 | # Guidance Automation Toolkit
106 | *.gpState
107 |
108 | # ReSharper is a .NET coding add-in
109 | _ReSharper*/
110 | *.[Rr]e[Ss]harper
111 | *.DotSettings.user
112 |
113 | # JustCode is a .NET coding add-in
114 | .JustCode
115 |
116 | # TeamCity is a build add-in
117 | _TeamCity*
118 |
119 | # DotCover is a Code Coverage Tool
120 | *.dotCover
121 |
122 | # AxoCover is a Code Coverage Tool
123 | .axoCover/*
124 | !.axoCover/settings.json
125 |
126 | # Visual Studio code coverage results
127 | *.coverage
128 | *.coveragexml
129 |
130 | # NCrunch
131 | _NCrunch_*
132 | .*crunch*.local.xml
133 | nCrunchTemp_*
134 |
135 | # MightyMoose
136 | *.mm.*
137 | AutoTest.Net/
138 |
139 | # Web workbench (sass)
140 | .sass-cache/
141 |
142 | # Installshield output folder
143 | [Ee]xpress/
144 |
145 | # DocProject is a documentation generator add-in
146 | DocProject/buildhelp/
147 | DocProject/Help/*.HxT
148 | DocProject/Help/*.HxC
149 | DocProject/Help/*.hhc
150 | DocProject/Help/*.hhk
151 | DocProject/Help/*.hhp
152 | DocProject/Help/Html2
153 | DocProject/Help/html
154 |
155 | # Click-Once directory
156 | publish/
157 |
158 | # Publish Web Output
159 | *.[Pp]ublish.xml
160 | *.azurePubxml
161 | # Note: Comment the next line if you want to checkin your web deploy settings,
162 | # but database connection strings (with potential passwords) will be unencrypted
163 | *.pubxml
164 | *.publishproj
165 |
166 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
167 | # checkin your Azure Web App publish settings, but sensitive information contained
168 | # in these scripts will be unencrypted
169 | PublishScripts/
170 |
171 | # NuGet Packages
172 | *.nupkg
173 | # The packages folder can be ignored because of Package Restore
174 | **/[Pp]ackages/*
175 | # except build/, which is used as an MSBuild target.
176 | !**/[Pp]ackages/build/
177 | # Uncomment if necessary however generally it will be regenerated when needed
178 | #!**/[Pp]ackages/repositories.config
179 | # NuGet v3's project.json files produces more ignorable files
180 | *.nuget.props
181 | *.nuget.targets
182 |
183 | # Microsoft Azure Build Output
184 | csx/
185 | *.build.csdef
186 |
187 | # Microsoft Azure Emulator
188 | ecf/
189 | rcf/
190 |
191 | # Windows Store app package directories and files
192 | AppPackages/
193 | BundleArtifacts/
194 | Package.StoreAssociation.xml
195 | _pkginfo.txt
196 | *.appx
197 |
198 | # Visual Studio cache files
199 | # files ending in .cache can be ignored
200 | *.[Cc]ache
201 | # but keep track of directories ending in .cache
202 | !*.[Cc]ache/
203 |
204 | # Others
205 | ClientBin/
206 | ~$*
207 | *~
208 | *.dbmdl
209 | *.dbproj.schemaview
210 | *.jfm
211 | *.pfx
212 | *.publishsettings
213 | orleans.codegen.cs
214 |
215 | # Since there are multiple workflows, uncomment next line to ignore bower_components
216 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
217 | #bower_components/
218 |
219 | # RIA/Silverlight projects
220 | Generated_Code/
221 |
222 | # Backup & report files from converting an old project file
223 | # to a newer Visual Studio version. Backup files are not needed,
224 | # because we have git ;-)
225 | _UpgradeReport_Files/
226 | Backup*/
227 | UpgradeLog*.XML
228 | UpgradeLog*.htm
229 |
230 | # SQL Server files
231 | *.mdf
232 | *.ldf
233 | *.ndf
234 |
235 | # Business Intelligence projects
236 | *.rdl.data
237 | *.bim.layout
238 | *.bim_*.settings
239 |
240 | # Microsoft Fakes
241 | FakesAssemblies/
242 |
243 | # GhostDoc plugin setting file
244 | *.GhostDoc.xml
245 |
246 | # Node.js Tools for Visual Studio
247 | .ntvs_analysis.dat
248 | node_modules/
249 |
250 | # Typescript v1 declaration files
251 | typings/
252 |
253 | # Visual Studio 6 build log
254 | *.plg
255 |
256 | # Visual Studio 6 workspace options file
257 | *.opt
258 |
259 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
260 | *.vbw
261 |
262 | # Visual Studio LightSwitch build output
263 | **/*.HTMLClient/GeneratedArtifacts
264 | **/*.DesktopClient/GeneratedArtifacts
265 | **/*.DesktopClient/ModelManifest.xml
266 | **/*.Server/GeneratedArtifacts
267 | **/*.Server/ModelManifest.xml
268 | _Pvt_Extensions
269 |
270 | # Paket dependency manager
271 | .paket/paket.exe
272 | paket-files/
273 |
274 | # FAKE - F# Make
275 | .fake/
276 |
277 | # JetBrains Rider
278 | .idea/
279 | *.sln.iml
280 |
281 | # CodeRush
282 | .cr/
283 |
284 | # Python Tools for Visual Studio (PTVS)
285 | __pycache__/
286 | *.pyc
287 |
288 | # Cake - Uncomment if you are using it
289 | # tools/**
290 | # !tools/packages.config
291 |
292 | # Tabs Studio
293 | *.tss
294 |
295 | # Telerik's JustMock configuration file
296 | *.jmconfig
297 |
298 | # BizTalk build output
299 | *.btp.cs
300 | *.btm.cs
301 | *.odx.cs
302 | *.xsd.cs
303 |
304 | # OpenCover UI analysis results
305 | OpenCover/
306 |
307 | # Specific Environment Files
308 | *.swp
309 | *.*~
310 | .DS_Store
311 |
312 | # Visual Studio Code
313 | .vscode
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | This project seems not useful on 2025. Archived.
2 |
3 | # S3Sync
4 |
5 | Amazon S3 Content Synchronization with .NET.
6 |
7 | S3Sync synchronizes a directory to an S3 Bucket. It makes the bucket identical to the `LocalRoot` (source).
8 |
9 | Note: Remote files that are not in the `LocalRoot` are removed.
10 |
11 | ## How to use
12 |
13 | You can download latest version from [Release](https://github.com/guitarrapc/S3Sync/releases) Page.
14 |
15 | Action | Full.NET | .NETCore 2.0 | Docker
16 | ---- | ---- | ---- | ----
17 | Requirement | .NET Framework 4.7 or higher | [.NETCore 2.0 or higher](https://www.microsoft.com/net/download/windows) | Docker
18 | Download | `s3sync_netfull.zip` | `s3sync_netcore.tar.gz` | [guitarrapc/s3sync](https://hub.docker.com/r/guitarrapc/s3sync/)
19 | Run | Extract zip and run `S3Sync.exe` | Extract zip and run `dotnet S3Sync.dll` | `docker run guitarrapc/s3sync`
20 |
21 | ## Configuration
22 |
23 | You can pass parameter to S3Sync with `Arguments` or `Environment Variable`.
24 |
25 | Arguments | Environment Variable | Required / Optional | Description
26 | ---- | ---- | ---- | ----
27 | BucketName=`"string"` | S3Sync_BucketName | Required | Specify S3 BucketName to sync.
28 | LocalRoot=`"string"` | S3Sync_LocalRoot | Required | Specify Local File Path to Sync.
29 | KeyPrefix=`"string"` | S3Sync_KeyPrefix | Optional | Specify KeyPrefix to add to localfile when Sync.
30 | IgnoreKeyPrefix=`"string"` | S3Sync_IgnoreKeyPrefix | Optional | Specify KeyPrefix to ignore on S3.
31 | ExcludeFiles=`"string","string"` | S3Sync_ExcludeFiles | Optional | Specify local file names you want to exclude. (use `,` for multiple.)
32 | ExcludeDirectories=`"string","string"` | S3Sync_ExcludeDirectories | Optional | Specify local directory names you want to exclude. (use `,` for multiple.)
33 | CredentialProfile=`"string"` | S3Sync_CredentialProfile | Optional | Specify Credential Profile name.
34 | Silent=`bool` | S3Sync_Silent | Optional | Set `true` when you want to suppress upload progress. (Default : `false`)
35 | DryRun=`bool` | S3Sync_DryRun | Optional | Set `true` to leave S3 unchanged and only show the estimated plan. Set `false` to execute synchronization. (Default : `true`)
36 | ContentType=`"string"` | S3Sync_ContentType | Optional | Specify ContentType for object. (default `null` and will be `application/octet-stream`)
37 | Region=`"string"` | S3Sync_Region | Optional | Specify region for the bucket. (default `null` and will be `ap-northeast-1`)
38 |
39 | ## Sample
40 |
41 | You can use `dotnet` to run as .NETCore.
42 |
43 | ```bash
44 | $ dotnet S3Sync.dll BucketName=your-awesome-bucket LocalRoot=/Home/User/HogeMoge ExcludeFiles=.gitignore,.gitattributes ExcludeDirectories=.git,test
45 | ```
46 |
47 | No .NETCore? You can use Full.NET as a ConsoleApp.
48 |
49 | ```cmd
50 | S3Sync.exe BucketName=your-fantastic-bucket KeyPrefix=hoge LocalRoot=C:/Users/User/HomeMoge DryRun=false
51 | ```
52 |
53 | ## Docker Support
54 |
55 | You can run with docker.
56 |
57 | Run with IAM Role is recommended.
58 |
59 | ```bash
60 | docker run --rm -v :/app/sync/ -e S3Sync_BucketName= S3Sync_DryRun=false guitarrapc/s3sync
61 | ```
62 |
63 | Local run without IAM Role, use AWS Credentials.
64 |
65 | ```bash
66 | $ docker run --rm -v :/app/sync/ -e S3Sync_BucketName= -e AWS_ACCESS_KEY_ID= -e AWS_SECRET_ACCESS_KEY= S3Sync_DryRun=false guitarrapc/s3sync
67 | ```
68 |
69 | ### Build s3sync within docker
70 |
71 | Build S3Sync with docker-compose. This means you do not need the .NET Core 2.0 SDK installed on your host.
72 |
73 | ```bash
74 | docker-compose -f docker-compose.ci.build.yml up
75 | ```
76 |
77 | Build artifacts will be generated in following path.
78 |
79 | ```bash
80 | S3Sync\source\S3Sync\obj\Docker\publish
81 | ```
82 |
83 | Clean up build docker container resource with down.
84 |
85 | ```bash
86 | docker-compose -f docker-compose.ci.build.yml down
87 | ```
88 |
89 | ### Docker Image Build
90 |
91 | Create docker image with docker-compose.
92 |
93 | ```bash
94 | docker-compose -f docker-compose.yml build
95 | ```
96 |
97 | ## Credential handling
98 |
99 | Synchronization operation requires read, write and delete objects permission.
100 |
101 | It is recommended that you use `IAM Policy` and `Profile` to handle appropriate access right.
102 |
103 | ### Configure IAM Policy
104 |
105 | Here's some sample IAM Policy.
106 |
107 | ```json
108 | {
109 | "Version": "2012-10-17",
110 | "Statement": [
111 | {
112 | "Sid": "Stmt1446117060000",
113 | "Effect": "Allow",
114 | "Action": [
115 | "s3:GetObject",
116 | "s3:ListAllMyBuckets",
117 | "s3:ListBucket",
118 | "s3:PutObject"
119 | ],
120 | "Resource": [
121 | "arn:aws:s3:::*"
122 | ]
123 | }
124 | ]
125 | }
126 | ```
127 |
128 | If you want to restrict access to certain Bucket, then replace `*` with desired bucketName.
129 |
130 | ```json
131 | {
132 | "Version": "2012-10-17",
133 | "Statement": [
134 | {
135 | "Sid": "Stmt1446117060000",
136 | "Effect": "Allow",
137 | "Action": [
138 | "s3:GetObject",
139 | "s3:ListAllMyBuckets",
140 | "s3:ListBucket",
141 | "s3:PutObject"
142 | ],
143 | "Resource": [
144 | "arn:aws:s3:::PutYourBucketName"
145 | ]
146 | }
147 | ]
148 | }
149 | ```
150 |
151 | ### Configure Profile
152 |
153 | There are several way to set profile.
154 |
155 | If you run S3Sync on an AWS resource, you should use an AWS managed profile such as an IAM Instance Profile.
156 | If you run S3Sync on a local environment, you can configure your machine with the "aws cli" or other tools.
157 |
158 | #### aws cli configure sample
159 |
160 | You can create AWS Credential Profile with AWS CLI.
161 |
162 | ```bash
163 | aws configure --profile sample
164 | ```
165 |
166 | #### Other options?
167 |
168 | You can create Profile with other tools.
169 |
170 | > - [AWS Toolkit for Visual Studio](https://aws.amazon.com/visualstudio/?nc1=f_ls) .
171 | > - [AWS Tools for Windows PowerShell](https://aws.amazon.com/powershell/?nc1=f_ls)
172 |
173 | Or you can use following method.
174 |
175 | ```csharp
176 | public static void RegisterProfile(string profileName, string accessKey, string accessSecret)
177 | {
178 | var option = new CredentialProfileOptions
179 | {
180 | AccessKey = accessKey,
181 | SecretKey = accessSecret
182 | };
183 | new NetSDKCredentialsFile().RegisterProfile(new CredentialProfile(profileName, option));
184 | }
185 | ```
186 |
187 | ## License
188 |
189 | The MIT License (MIT)
190 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/LocalFiles/FileHashHelper.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using System.Collections.Generic;
3 | using System.IO;
4 | using System.Linq;
5 | using System.Security.Cryptography;
6 |
7 | namespace S3Sync.Core.LocalFiles
8 | {
9 | public static class FileHashHelper
10 | {
11 | private static readonly HashAlgorithm md5 = MD5.Create();
12 |
13 | ///
14 | /// Get Chunksize
15 | ///
16 | ///
17 | ///
18 | ///
19 | public static int GetChunkSize(long baseSize, int chunkCount)
20 | {
21 | if (chunkCount == 0) return 0;
22 |
23 | // use bit calculation to detect power of the 2
24 | // 10000 & 11111 should be 0.
25 | var modArrange = (baseSize & baseSize - 1) == 0 ? 0 : 1;
26 |
27 | // Calculate chunksize (if mod exists than - chunkCount)
28 | return EstimateChunkSize((decimal)baseSize / (chunkCount - modArrange));
29 | }
30 |
31 | ///
32 | /// Estimate Max available 2^n Chunk size.
33 | ///
34 | /// http://docs.aws.amazon.com/ja_jp/AmazonS3/latest/dev/qfacts.html
35 | ///
36 | ///
37 | public static int EstimateChunkSize(decimal baseSize)
38 | {
39 | if (baseSize < 536870912) // 512MB
40 | {
41 | if (baseSize < 33554432) // 32MB
42 | {
43 | if (baseSize < 16777216) // 16MB
44 | {
45 | if (baseSize < 8388608) // 8MB
46 | {
47 | return 5;
48 | }
49 | else
50 | {
51 | return 8;
52 | }
53 | }
54 | else
55 | {
56 | return 16;
57 | }
58 | }
59 | else
60 | {
61 | if (baseSize < 134217728) // 128MB
62 | {
63 | if (baseSize < 67108864) // 64MB
64 | {
65 | return 32;
66 | }
67 | else
68 | {
69 | return 64;
70 | }
71 | }
72 | else
73 | {
74 | if (baseSize < 268435456) // 256MB
75 | {
76 | return 128;
77 | }
78 | else
79 | {
80 | return 256;
81 | }
82 | }
83 | }
84 | }
85 | else
86 | {
87 | if (baseSize < 2147483648) // 2,048MB
88 | {
89 | if (baseSize < 1073741824) // 1,024MB
90 | {
91 | return 512;
92 | }
93 | else
94 | {
95 | return 1024;
96 | }
97 | }
98 | else
99 | {
100 | if (baseSize < 4294967296) // 4,096MB
101 | {
102 | return 2048;
103 | }
104 | else
105 | {
106 | return 4096;
107 | }
108 | }
109 | }
110 | }
111 |
112 | public static byte[] GetFileBinary(string filePath)
113 | {
114 | using (var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read))
115 | {
116 | var fileBytes = new byte[stream.Length];
117 | stream.Read(fileBytes, 0, fileBytes.Length);
118 | return fileBytes;
119 | }
120 | }
121 |
122 | public static byte[] GetMD5Hash(this byte[] array)
123 | {
124 | var hash = GetHash(array, md5);
125 | return hash;
126 | }
127 |
128 | public static byte[] GetHash(this byte[] array, HashAlgorithm algorithm)
129 | {
130 | var hash = algorithm.ComputeHash(array);
131 | return hash;
132 | }
133 |
134 | public static string CalculateEtag(byte[] array, int chunkCount)
135 | {
136 | using (var md5 = MD5.Create())
137 | {
138 | if (chunkCount == 0)
139 | {
140 | return array.GetHash(md5).ToHexString();
141 | }
142 |
143 | var multipartSplitCount = 0;
144 | var chunkSize = 1024 * 1024 * chunkCount;
145 | var splitCount = array.Length / chunkSize;
146 | var mod = array.Length - chunkSize * splitCount;
147 | IEnumerable concatHash = Enumerable.Empty();
148 |
149 | for (var i = 0; i < splitCount; i++)
150 | {
151 | var offset = i == 0 ? 0 : chunkSize * i;
152 | var chunk = GetSegment(array, offset, chunkSize);
153 | var hash = chunk.ToArray().GetHash(md5);
154 | concatHash = concatHash.Concat(hash);
155 | multipartSplitCount++;
156 | }
157 | if (mod != 0)
158 | {
159 | var chunk = GetSegment(array, chunkSize * splitCount, mod);
160 | var hash = chunk.ToArray().GetHash(md5);
161 | concatHash = concatHash.Concat(hash);
162 | multipartSplitCount++;
163 | }
164 | var multipartHash = concatHash.ToArray().GetHash(md5).ToHexString();
165 |
166 | if (multipartSplitCount <= 0)
167 | {
168 | return multipartHash;
169 | }
170 | else
171 | {
172 | return multipartHash + "-" + multipartSplitCount;
173 | }
174 | }
175 | }
176 |
177 | private static ArraySegment GetSegment(this T[] array, int offset, int? count = null)
178 | {
179 | if (count == null) { count = array.Length - offset; }
180 | return new ArraySegment(array, offset, count.Value);
181 | }
182 |
183 | public static string ToHashString(this byte[] hash)
184 | {
185 | return BitConverter.ToString(hash).Replace("-", "").ToLower();
186 | }
187 |
188 | public static string ToHexString(this byte[] bytes)
189 | {
190 | var chars = new char[bytes.Length * 2];
191 |
192 | for (int i = 0; i < bytes.Length; i++)
193 | {
194 | chars[2 * i] = ToHexDigit(bytes[i] / 16);
195 | chars[2 * i + 1] = ToHexDigit(bytes[i] % 16);
196 | }
197 |
198 | return new string(chars).ToLower();
199 | }
200 |
201 | private static char ToHexDigit(int i)
202 | {
203 | if (i < 10)
204 | {
205 | return (char)(i + '0');
206 | }
207 | return (char)(i - 10 + 'A');
208 | }
209 | }
210 | }
211 |
--------------------------------------------------------------------------------
/source/S3Sync.BenchmarkCore/Program.cs:
--------------------------------------------------------------------------------
1 | using Amazon.S3;
2 | using Amazon.S3.Transfer;
3 | using BenchmarkDotNet.Attributes;
4 | using BenchmarkDotNet.Configs;
5 | using BenchmarkDotNet.Diagnosers;
6 | using BenchmarkDotNet.Environments;
7 | using BenchmarkDotNet.Exporters;
8 | using BenchmarkDotNet.Jobs;
9 | using BenchmarkDotNet.Running;
10 | using BenchmarkDotNet.Toolchains.CsProj;
11 | using S3Sync.Core;
12 | using S3Sync.Core.LocalFiles;
13 | using System;
14 | using System.IO;
15 | using System.Linq;
16 | using System.Threading.Tasks;
17 |
18 | namespace S3Sync.BenchmarkCore
19 | {
20 | public class Program
21 | {
22 | static void Main(string[] args)
23 | {
24 | var switcher = new BenchmarkSwitcher(new[]
25 | {
26 | // Target is Multipart Directory and Files.
27 | // SinglePart is extremely slow.
28 | typeof(ConcurrentBenchmark),
29 | });
30 |
31 | args = new string[] { "0" };
32 | switcher.Run(args);
33 | }
34 |
35 | public class BenchmarkConfig : ManualConfig
36 | {
37 | public BenchmarkConfig()
38 | {
39 | Add(MarkdownExporter.GitHub);
40 | Add(MemoryDiagnoser.Default);
41 |
42 | // .NETCore
43 | Add(Job.ShortRun.With(Runtime.Core)
44 | .With(CsProjCoreToolchain.NetCoreApp20)
45 | .WithWarmupCount(1)
46 | .WithTargetCount(1)
47 | .WithLaunchCount(1));
48 |
49 | // Full.Net
50 | Add(Job.ShortRun.With(Runtime.Clr)
51 | .With(Jit.RyuJit)
52 | .With(Platform.X64)
53 | .WithWarmupCount(1)
54 | .WithTargetCount(1)
55 | .WithLaunchCount(1));
56 | }
57 | }
58 |
59 | ///
60 | /// Multipart Synchronization
61 | ///
62 | [Config(typeof(BenchmarkConfig))]
63 | public class ConcurrentBenchmark
64 | {
65 | public S3Client S3 { get; set; } = new S3Client(new S3ClientOption { DryRun = true }, AmazonCredential.GetCredential(Environment.GetEnvironmentVariable("S3Sync_Bench_CredentialProfile")));
66 | public string BucketName { get; set; } = Environment.GetEnvironmentVariable("S3Sync_Bench_BucketName");
67 | public string LocalRoot { get; set; } = Environment.GetEnvironmentVariable("S3Sync_Bench_LocalRoot");
68 |
69 | [GlobalSetup]
70 | public void Setup()
71 | {
72 | }
73 |
74 | [Benchmark]
75 | public async Task ConcurrentDirectoryUploadPartsize16CpuX1()
76 | {
77 | var directoryUploadRequest = new TransferUtilityUploadDirectoryRequest
78 | {
79 | BucketName = BucketName,
80 | Directory = LocalRoot,
81 | SearchOption = SearchOption.AllDirectories,
82 | SearchPattern = "*",
83 | StorageClass = S3StorageClass.ReducedRedundancy,
84 | };
85 |
86 | //directoryUploadRequest.UploadDirectoryFileRequestEvent += (sender, e) =>
87 | //{
88 | // e.UploadRequest.PartSize = S3.TransferConfig.MinSizeBeforePartUpload;
89 | //};
90 | //directoryUploadRequest.UploadDirectoryProgressEvent += (senter, e) =>
91 | //{
92 | // //Console.WriteLine($"{((decimal)e.TransferredBytes / e.TotalBytes).ToString("p")}, {e}");
93 | //};
94 | await S3.Transfer.UploadDirectoryAsync(directoryUploadRequest);
95 | }
96 |
97 | [Benchmark]
98 | public async Task ConcurrentDirectoryUploadPartsize16CpuX2()
99 | {
100 | var directoryUploadRequest = new TransferUtilityUploadDirectoryRequest
101 | {
102 | BucketName = BucketName,
103 | Directory = LocalRoot,
104 | SearchOption = SearchOption.AllDirectories,
105 | SearchPattern = "*",
106 | StorageClass = S3StorageClass.ReducedRedundancy,
107 | };
108 | //directoryUploadRequest.UploadDirectoryFileRequestEvent += (sender, e) =>
109 | //{
110 | // e.UploadRequest.PartSize = S3.TransferConfig.MinSizeBeforePartUpload;
111 | //};
112 | //directoryUploadRequest.UploadDirectoryProgressEvent += (senter, e) =>
113 | //{
114 | // //Console.WriteLine($"{((decimal)e.TransferredBytes / e.TotalBytes).ToString("p")}, {e}");
115 | //};
116 | await S3.Transfer2.UploadDirectoryAsync(directoryUploadRequest);
117 | }
118 |
119 | [Benchmark]
120 | public async Task ConcurrentDirectoryUploadPartsize5CpuX1()
121 | {
122 | var directoryUploadRequest = new TransferUtilityUploadDirectoryRequest
123 | {
124 | BucketName = BucketName,
125 | Directory = LocalRoot,
126 | SearchOption = SearchOption.AllDirectories,
127 | SearchPattern = "*",
128 | StorageClass = S3StorageClass.ReducedRedundancy,
129 | };
130 |
131 | //directoryUploadRequest.UploadDirectoryFileRequestEvent += (sender, e) =>
132 | //{
133 | //};
134 | //directoryUploadRequest.UploadDirectoryProgressEvent += (senter, e) =>
135 | //{
136 | // //Console.WriteLine($"{((decimal)e.TransferredBytes / e.TotalBytes).ToString("p")}, {e}");
137 | //};
138 | await S3.Transfer.UploadDirectoryAsync(directoryUploadRequest);
139 | }
140 |
141 | [Benchmark(Baseline = true)]
142 | public async Task ConcurretFileUploadPartsize16CpuX1()
143 | {
144 | var tasks = new EnumerableFileSystem().EnumerateFiles(LocalRoot)
145 | .Select(async x =>
146 | {
147 | var fileUploadRequest = new TransferUtilityUploadRequest
148 | {
149 | BucketName = BucketName,
150 | FilePath = x.FullPath,
151 | Key = x.FullPath.Replace(LocalRoot + @"\", "").Replace(@"\", "/"),
152 | PartSize = S3.TransferConfig.MinSizeBeforePartUpload,
153 | StorageClass = S3StorageClass.ReducedRedundancy,
154 | };
155 | fileUploadRequest.UploadProgressEvent += (sender, e) =>
156 | {
157 | //Console.WriteLine($"{e.PercentDone}%, {e.FilePath}, {e}");
158 | };
159 |
160 | await S3.Transfer.UploadAsync(fileUploadRequest);
161 | });
162 | await Task.WhenAll(tasks);
163 | }
164 |
165 | [Benchmark]
166 | public async Task ConcurretFileUploadPartsize16CpuX2()
167 | {
168 | var tasks = new EnumerableFileSystem().EnumerateFiles(LocalRoot)
169 | .Select(async x =>
170 | {
171 | var fileUploadRequest = new TransferUtilityUploadRequest
172 | {
173 | BucketName = BucketName,
174 | FilePath = x.FullPath,
175 | Key = x.FullPath.Replace(LocalRoot + @"\", "").Replace(@"\", "/"),
176 | PartSize = S3.TransferConfig.MinSizeBeforePartUpload,
177 | StorageClass = S3StorageClass.ReducedRedundancy,
178 | };
179 | await S3.Transfer2.UploadAsync(fileUploadRequest);
180 | });
181 | await Task.WhenAll(tasks);
182 | }
183 |
184 | [Benchmark]
185 | public async Task ConcurretFileUploadPartsize5CpuX1()
186 | {
187 | var tasks = new EnumerableFileSystem().EnumerateFiles(LocalRoot)
188 | .Select(async x =>
189 | {
190 | var fileUploadRequest = new TransferUtilityUploadRequest
191 | {
192 | BucketName = BucketName,
193 | FilePath = x.FullPath,
194 | Key = x.FullPath.Replace(LocalRoot + @"\", "").Replace(@"\", "/"),
195 | StorageClass = S3StorageClass.ReducedRedundancy,
196 | };
197 | await S3.Transfer.UploadAsync(fileUploadRequest);
198 | });
199 | await Task.WhenAll(tasks);
200 | }
201 | }
202 | }
203 | }
--------------------------------------------------------------------------------
/source/S3Sync/Program.cs:
--------------------------------------------------------------------------------
1 | using System;
2 | using S3Sync.Core;
3 | using S3Sync.Core.LocalFiles;
4 | using System.Linq;
5 | using Amazon.S3.Transfer;
6 | using System.Diagnostics;
7 | using System.Net;
8 | using System.Threading.Tasks;
9 | using S3Sync.Core.Diagnostics;
10 |
11 | namespace S3Sync
12 | {
13 | class Program
14 | {
        // Target S3 bucket to synchronize into (required).
        private static string BucketName { get; set; }
        // Local directory used as the synchronization source (required).
        private static string LocalRoot { get; set; }
        // Prefix prepended to object keys on upload; trailing '/' is trimmed when parsed.
        private static string KeyPrefix { get; set; }
        // Key prefix on S3 that is excluded from synchronization.
        private static string IgnoreKeyPrefix { get; set; }
        // Local file names excluded from the sync (comma-separated in the argument).
        private static string[] ExcludeFiles { get; set; }
        // Local directory names excluded from the sync (comma-separated in the argument).
        private static string[] ExcludeDirectories { get; set; }
        // When true, the upload progress callback is not registered.
        private static bool Silent { get; set; }
        // AWS credential profile name; null/empty falls back to ambient credentials.
        private static string CredentialProfile { get; set; }
        // Options (DryRun / ContentType / Region) handed to the S3 client.
        private static S3ClientOption Option { get; set; } = new S3ClientOption();
        // Upload progress callback; stays null when Silent.
        // NOTE(review): the generic type argument appears lost in this copy —
        // presumably Action<UploadProgressArgs>; confirm against the original source.
        private static Action UploadCallback { get; set; }
25 |
        // Command-line argument names (e.g. "BucketName=foo").
        // Members are positionally paired with EnvType so each argument
        // has a matching S3Sync_* environment-variable fallback.
        private enum ArgumentType
        {
            BucketName = 0,
            LocalRoot,
            KeyPrefix,
            IgnoreKeyPrefix,
            ExcludeFiles,
            ExcludeDirectories,
            Silent,
            CredentialProfile,
            DryRun,
            ContentType,
            Region,
        }
40 |
        // Environment-variable names used as fallbacks when an argument is absent.
        // Members are positionally paired with ArgumentType (same order, same count).
        private enum EnvType
        {
            S3Sync_BucketName = 0,
            S3Sync_LocalRoot,
            S3Sync_KeyPrefix,
            S3Sync_IgnoreKeyPrefix,
            S3Sync_ExcludeFiles,
            S3Sync_ExcludeDirectories,
            S3Sync_Silent,
            S3Sync_CredentialProfile,
            S3Sync_DryRun,
            S3Sync_ContentType,
            S3Sync_Region,
        }
55 |
56 | ///
57 | /// Sample .NETCore : dotnet S3Sync.dll BucketName=guitarrapc-multipart-test LocalRoot=C:\HogeMogeImages ExcludeFiles=.gitignore,.gitattributes ExcludeDirectories=.git,test DryRun=false
58 | /// Sample Full.NET : S3Sync.exe BucketName=guitarrapc-multipart-test KeyPrefix=hoge LocalRoot=C:\HomeMogeImages ExcludeFiles=.gitignore,.gitattributes, ExcludeDirectories=.git,test DryRun=false
59 | ///
60 | ///
61 | static void Main(string[] args)
62 | {
63 | try
64 | {
65 | MainCoreAsync(args).GetAwaiter().GetResult();
66 | Environment.Exit(0);
67 | }
68 | catch (Exception ex)
69 | {
70 | if (ex.InnerException != null)
71 | {
72 | Console.Error.WriteLine($"{ex.InnerException.Message}, {ex.InnerException.GetType().FullName}, {ex.InnerException.StackTrace}");
73 | }
74 | Console.Error.WriteLine($"{ex.Message}, {ex.GetType().FullName}, {ex.StackTrace}");
75 | Environment.Exit(1);
76 | }
77 | }
78 |
79 | static async Task MainCoreAsync(string[] args)
80 | {
81 | var sw = Stopwatch.StartNew();
82 |
83 | // Apply initial settings
84 | ApplyInitialConfiguration();
85 |
86 | // Validate argumanges
87 | LogTitle("Start : Evaluate Arguments. Override with EnvironmentVariables if missing argument.");
88 | EvaluateArguments(args);
89 |
90 | // Set UploadCallback
91 | if (!Silent)
92 | {
93 | // アップロード時のコールバック登録
94 | UploadCallback = e => Log($"{e.PercentDone}%, {e.FilePath}, {nameof(KeyPrefix)} : {KeyPrefix}, {e}");
95 | }
96 |
97 | // Obtain local files
98 | LogTitle("Start : Obtain Local items.");
99 | var localFileInfos = new EnumerableFileSystem(ExcludeDirectories, ExcludeFiles)
100 | .EnumerateFiles(LocalRoot)
101 | .ToArray();
102 |
103 | var obtainLocal = sw.Elapsed;
104 | Log($@"Complete : Obtain Local items.", obtainLocal);
105 | sw.Restart();
106 |
107 | // Get Credential
108 | // Missing CredentialProfile : Use IAM Instance Profile
109 | // Found CredentialProfile : Use as ProfileName
110 | LogTitle("Start : Obtain credential");
111 | var s3 = string.IsNullOrEmpty(CredentialProfile)
112 | ? new S3Client(Option)
113 | : new S3Client(Option, AmazonCredential.GetCredential(CredentialProfile));
114 |
115 | // Begin Synchronization
116 | LogTitle("Start : Synchronization");
117 | var result = await s3.SyncWithLocal(localFileInfos, BucketName, KeyPrefix, IgnoreKeyPrefix, UploadCallback);
118 |
119 | // Show result
120 | LogTitle($@"Show : Synchronization result as follows.");
121 | Warn(result.ToMarkdown());
122 | var synchronization = sw.Elapsed;
123 | Log($@"Complete : Synchronization.", synchronization);
124 |
125 | // Total result
126 | Log($@"Total :", (obtainLocal + synchronization));
127 | }
128 |
129 | static void ApplyInitialConfiguration()
130 | {
131 | // Web周りの設定
132 | ServicePointManager.UseNagleAlgorithm = false;
133 | ServicePointManager.Expect100Continue = false;
134 | ServicePointManager.DefaultConnectionLimit = 96;
135 | }
136 |
137 | static void EvaluateArguments(string[] args)
138 | {
139 | // BucketName=nantokakantokabucket
140 | BucketName = args.Where(x => x.StartsWith(ArgumentType.BucketName.ToString(), StringComparison.InvariantCultureIgnoreCase))
141 | .SelectMany(x => x.SplitEx("="))
142 | .LastOrDefault()
143 | ?? GetEnvValueString(ArgumentType.BucketName, EnvType.S3Sync_BucketName);
144 |
145 | // LocalRoot=c:\hogemoge
146 | LocalRoot = args.Where(x => x.StartsWith(ArgumentType.LocalRoot.ToString(), StringComparison.InvariantCultureIgnoreCase))
147 | .SelectMany(x => x.SplitEx("="))
148 | .LastOrDefault()
149 | ?? GetEnvValueString(ArgumentType.LocalRoot, EnvType.S3Sync_LocalRoot);
150 |
151 | // KeyPrefix=image
152 | KeyPrefix = args.Where(x => x.StartsWith(ArgumentType.KeyPrefix.ToString(), StringComparison.InvariantCultureIgnoreCase))
153 | .SelectMany(x => x.SplitEx("="))
154 | .LastOrDefault()
155 | ?.TrimEnd('/')
156 | ?? GetEnvValueString(ArgumentType.KeyPrefix, EnvType.S3Sync_KeyPrefix);
157 |
158 | // IgnoreKeyPrefix=image
159 | IgnoreKeyPrefix = args.Where(x => x.StartsWith(ArgumentType.IgnoreKeyPrefix.ToString(), StringComparison.InvariantCultureIgnoreCase))
160 | .SelectMany(x => x.SplitEx("="))
161 | .LastOrDefault()
162 | ?.TrimEnd('/')
163 | ?? GetEnvValueString(ArgumentType.IgnoreKeyPrefix, EnvType.S3Sync_IgnoreKeyPrefix);
164 |
165 | // ExcludeFiles=hogemoge,fugafuga
166 | ExcludeFiles = args.Where(x => x.StartsWith(ArgumentType.ExcludeFiles.ToString(), StringComparison.InvariantCultureIgnoreCase))
167 | .SelectMany(x => x.SplitEx("="))
168 | .LastOrDefault()
169 | ?.SplitEx(",")
170 | .Select(x => x.Trim())
171 | .ToArray()
172 | ?? GetEnvValueString(ArgumentType.ExcludeFiles, EnvType.S3Sync_ExcludeFiles)
173 | ?.SplitEx(",");
174 |
175 | // ExcludeDirectories=hogemoge,fugafuga
176 | ExcludeDirectories = args.Where(x => x.StartsWith(ArgumentType.ExcludeDirectories.ToString(), StringComparison.InvariantCultureIgnoreCase))
177 | .SelectMany(x => x.SplitEx("="))
178 | .LastOrDefault()
179 | ?.SplitEx(",")
180 | ?.Select(x => x.Trim())
181 | .ToArray()
182 | ?? GetEnvValueString(ArgumentType.ExcludeDirectories, EnvType.S3Sync_ExcludeDirectories)
183 | ?.SplitEx(",");
184 |
185 | // Silent=false
186 | Silent = bool.Parse(args.Where(x => x.StartsWith(ArgumentType.Silent.ToString(), StringComparison.InvariantCultureIgnoreCase))
187 | .SelectMany(x => x.SplitEx("="))
188 | .Where(x => string.Equals(x, "true", StringComparison.InvariantCultureIgnoreCase) || string.Equals(x, "false", StringComparison.InvariantCultureIgnoreCase))
189 | .LastOrDefault()
190 | ?.Trim()
191 | ?? GetEnvValueString(ArgumentType.Silent, EnvType.S3Sync_Silent)
192 | ?? "false");
193 |
194 | // CredentialProfile=ProfileName
195 | CredentialProfile = args.Where(x => x.StartsWith(ArgumentType.CredentialProfile.ToString(), StringComparison.InvariantCultureIgnoreCase))
196 | .SelectMany(x => x.SplitEx("="))
197 | .LastOrDefault()
198 | ?.Trim()
199 | ?? GetEnvValueString(ArgumentType.CredentialProfile, EnvType.S3Sync_CredentialProfile);
200 |
201 | // DryRun=true
202 | Option.DryRun = bool.Parse(args.Where(x => x.StartsWith(ArgumentType.DryRun.ToString(), StringComparison.InvariantCultureIgnoreCase))
203 | .SelectMany(x => x.SplitEx("="))
204 | .Where(x => string.Equals(x, "true", StringComparison.InvariantCultureIgnoreCase) || string.Equals(x, "false", StringComparison.InvariantCultureIgnoreCase))
205 | .LastOrDefault()
206 | ?.Trim()
207 | ?? GetEnvValueString(ArgumentType.DryRun, EnvType.S3Sync_DryRun)
208 | ?? "true");
209 |
210 | // ContentType=application/json
211 | Option.ContentType = args.Where(x => x.StartsWith(ArgumentType.ContentType.ToString(), StringComparison.InvariantCultureIgnoreCase))
212 | .SelectMany(x => x.SplitEx("="))
213 | .LastOrDefault()
214 | ?.TrimEnd('/')
215 | ?? GetEnvValueString(ArgumentType.ContentType, EnvType.S3Sync_ContentType);
216 |
217 | // Region=us-east-1
218 | Option.Region = args.Where(x => x.StartsWith(ArgumentType.Region.ToString(), StringComparison.InvariantCultureIgnoreCase))
219 | .SelectMany(x => x.SplitEx("="))
220 | .LastOrDefault()
221 | ?.TrimEnd('/')
222 | ?? GetEnvValueString(ArgumentType.Region, EnvType.S3Sync_Region);
223 |
224 | // Show Arguments
225 | Log($"{nameof(BucketName)} : {BucketName}");
226 | Log($"{nameof(LocalRoot)} : {LocalRoot}");
227 | Log($"{nameof(KeyPrefix)} : {KeyPrefix}");
228 | Log($"{nameof(IgnoreKeyPrefix)} : {IgnoreKeyPrefix}");
229 | Log($"{nameof(ExcludeFiles)} : {ExcludeFiles?.ToJoinedString(",")}");
230 | Log($"{nameof(ExcludeDirectories)} : {ExcludeDirectories?.ToJoinedString(",")}");
231 | Log($"{nameof(Silent)} : {Silent}");
232 | Log($"{nameof(CredentialProfile)} : {CredentialProfile}");
233 | Log($"{nameof(Option.DryRun)} : {Option.DryRun}");
234 | Log($"{nameof(Option.ContentType)} : {Option.ContentType}");
235 | Log($"{nameof(Option.Region)} : {Option.Region}");
236 |
237 | // Validate Required arguments
238 | if (string.IsNullOrWhiteSpace(BucketName))
239 | {
240 | Error("Please pass arguments. See detail followings.");
241 | PrintHelp();
242 | throw new NullReferenceException(nameof(BucketName));
243 | }
244 |
245 | if (string.IsNullOrWhiteSpace(LocalRoot))
246 | {
247 | Error("Please pass arguments. See detail followings.");
248 | PrintHelp();
249 | throw new NullReferenceException(nameof(LocalRoot));
250 | }
251 | }
252 |
253 | private static string GetEnvValueString(ArgumentType arg, EnvType env)
254 | {
255 | var result = Environment.GetEnvironmentVariable(env.ToString());
256 | if (!string.IsNullOrEmpty(result))
257 | {
258 | Warn($"Missing Argument : {arg.ToString()}, overriding with existing Env Value. Env Key : {env}");
259 | }
260 | else
261 | {
262 | Log($"Missing Argument : {arg.ToString()}, you can override with Env Value. Env Key : {env}");
263 | }
264 | return result;
265 | }
266 |
267 | private static void Error(string text)
268 | {
269 | Console.Error.WriteLine(text);
270 | }
271 |
272 | private static void Warn(string text)
273 | {
274 | Log(text, ConsoleColor.DarkYellow);
275 | }
276 |
277 | private static void PrintHelp()
278 | {
279 | Warn($@"-----------------------------------
280 | Arguments
281 | -----------------------------------
282 |
283 | Primary use this value. If argument is missing then EnvironmentVariable will override for you.
284 |
285 | {ArgumentType.BucketName}='~'
286 | (Override envkey : {EnvType.S3Sync_BucketName})
287 | Synchronize Target S3 Bucket
288 | {ArgumentType.LocalRoot}='~'
289 | (Override envkey : {EnvType.S3Sync_LocalRoot})
290 | Local Synchronization Path
291 |
292 | {ArgumentType.KeyPrefix}='~'
293 | (Override envkey : {EnvType.S3Sync_KeyPrefix})
294 | (default : null)
295 | Appending S3 Key Prefix on synchronization. Last char '/' will be ignored.
296 | This prefix will be appended head for each synchronized item so the parent
297 | or same directory items on S3 will be ignored from synchronization.
298 | {ArgumentType.IgnoreKeyPrefix}='~'
299 | (Override envkey : {EnvType.S3Sync_IgnoreKeyPrefix})
300 | (default : null)
301 | Key Prefix for items ignore on S3. Last char '/' will be ignored. S3 path
302 | for this prefix will be ignore from synchronization.
303 | {ArgumentType.ExcludeFiles}=['~','~']
304 | (Override envkey : {EnvType.S3Sync_ExcludeFiles})
305 | (default : null)
306 | Local exclude fileNames. Use , for multiple items.
307 | {ArgumentType.ExcludeDirectories}=['~','~']
308 | (Override envkey : {EnvType.S3Sync_ExcludeDirectories})
309 | (default : null)
310 | Local exclude directory Path. Use , for multiple items.
311 | {ArgumentType.Silent}=[true|false]
312 | (Override envkey : {EnvType.S3Sync_Silent})
313 | (default : false)
314 | Show upload progress or not.
315 | {ArgumentType.CredentialProfile}='~'
316 | (Override envkey : {EnvType.S3Sync_CredentialProfile})
317 | (default : null)
318 | Pass ProfileName. If missing, it expect running with IAM Instance Profile.
319 |
320 | -----------------------------------
321 | Examples
322 | -----------------------------------
323 |
324 | Example1.
325 | Synchronize LocalPath 'c:\hoge\moge' with S3Bucket 'MOGEBUCKET'.
326 |
327 | - Full.NET :
328 | S3Sync.exe {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge
329 |
330 | - .NETCore :
331 | dotnet S3Sync.dll {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge
332 |
333 | Exmaple2.
334 | Synchronize LocalPath 'c:\hoge\moge' with S3Bucket 'MOGEBUCKET'.
335 | Ignore local files '.gitignore' and '.gitattributes'.
336 | Ignore local folder '.git'.
337 |
338 | - Full.NET :
339 | S3Sync.exe {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.ExcludeFiles}=.gitignore,.gitattributes {ArgumentType.ExcludeDirectories}=.git
340 |
341 | - .NETCore :
342 | dotnet S3Sync.dll {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.ExcludeFiles}=.gitignore,.gitattributes {ArgumentType.ExcludeDirectories}=.git
343 |
344 | Exmaple3.
345 | Synchronize LocalPath 'c:\hoge\moge' with S3Bucket 'MOGEBUCKET'.
346 | Append 'test/fuga/' as KeyPrefix.
347 |
348 | - Full.NET :
349 | S3Sync.exe {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.KeyPrefix}=test/fuga
350 |
351 | - .NETCore :
352 | dotnet S3Sync.dll {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.KeyPrefix}=test/fuga
353 |
354 | Exmaple4.
355 | Synchronize LocalPath 'c:\hoge\moge' with S3Bucket 'MOGEBUCKET'.
356 | Append 'test/fuga/' as KeyPrefix.
357 | Ignore existing S3 KeyPrefix 'test/fuga/hoge' items.
358 |
359 | - Full.NET :
360 | S3Sync.exe {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.KeyPrefix}=test/fuga {ArgumentType.IgnoreKeyPrefix}=test/fuga/hoge
361 |
362 | - .NETCore :
363 | dotnet S3Sync.dll {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge {ArgumentType.KeyPrefix}=test/fuga {ArgumentType.IgnoreKeyPrefix}=test/fuga/hoge
364 |
365 | Exmaple5.
366 | Synchronize LocalPath 'c:\hoge\moge' with S3Bucket 'MOGEBUCKET'.
367 | Use CredentialProfile name 'hoge'.
368 |
369 | - Full.NET :
370 | S3Sync.exe {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge CredentialProfile=hoge
371 |
372 | - .NETCore :
373 | dotnet S3Sync.dll {ArgumentType.BucketName}=MOGEBUCKET {ArgumentType.LocalRoot}=c:\hoge\moge CredentialProfile=hoge
374 | ");
375 | }
376 |
377 | private static void Log(string text)
378 | {
379 | Log(text, ConsoleColor.DarkGray);
380 | }
381 |
382 | private static void Log(string text, TimeSpan elapsed)
383 | {
384 | Log($"{text} {elapsed.TotalSeconds.ToRound(2)}sec", ConsoleColor.DarkGray);
385 | }
386 |
387 | private static void Log(string text, ConsoleColor color)
388 | {
389 | lock (typeof(Program))
390 | {
391 | var oldColor = Console.ForegroundColor;
392 | if (oldColor != color)
393 | {
394 | Console.ForegroundColor = color;
395 | }
396 |
397 | Console.WriteLine(text);
398 |
399 | if (oldColor != color)
400 | {
401 | Console.ForegroundColor = oldColor;
402 | }
403 | }
404 | }
405 |
406 | private static void LogTitle(string text)
407 | {
408 | Log($@"
409 | ===============================================
410 | {text}
411 | ===============================================", ConsoleColor.White);
412 | }
413 | }
414 | }
415 |
--------------------------------------------------------------------------------
/source/S3Sync.Core/S3Client.cs:
--------------------------------------------------------------------------------
1 | using Amazon;
2 | using Amazon.Runtime;
3 | using Amazon.S3;
4 | using Amazon.S3.Model;
5 | using Amazon.S3.Transfer;
6 | using S3Sync.Core.Diagnostics;
7 | using S3Sync.Core.Extentions;
8 | using S3Sync.Core.LocalFiles;
9 | using System;
10 | using System.Collections.Generic;
11 | using System.Diagnostics;
12 | using System.Linq;
13 | using System.Net;
14 | using System.Threading.Tasks;
15 |
16 | namespace S3Sync.Core
17 | {
18 | public class S3Client : IDisposable
19 | {
20 | private static readonly RegionEndpoint defaultEndPoint = RegionEndpoint.APNortheast1;
21 | private static readonly int queueLength = 10000;
22 |
23 | public AmazonS3Config S3Config { get; private set; }
24 | public AmazonS3Client Client { get; private set; }
25 | public TransferUtilityConfig TransferConfig { get; private set; }
26 | public TransferUtility Transfer { get; private set; }
27 | public TransferUtility Transfer2 { get; private set; }
28 | public S3ClientOption Option { get; private set; }
29 |
30 | ///
31 | /// IAM Instance Profile Version
32 | ///
33 | public S3Client(S3ClientOption option)
34 | {
35 | S3Config = new AmazonS3Config
36 | {
37 | RegionEndpoint = !string.IsNullOrEmpty(option.Region)
38 | ? RegionEndpoint.GetBySystemName(option.Region)
39 | : defaultEndPoint,
40 | };
41 | TransferConfig = new TransferUtilityConfig
42 | {
43 | MinSizeBeforePartUpload = 1024 * 1024 * 16, // 16MB
44 | ConcurrentServiceRequests = Environment.ProcessorCount * 2,
45 | };
46 | Client = new AmazonS3Client(S3Config);
47 | Transfer = new TransferUtility(Client);
48 | Transfer2 = new TransferUtility(Client, TransferConfig);
49 | Option = option;
50 | }
51 |
52 | ///
53 | /// AWS Credential Version
54 | ///
55 | ///
56 | public S3Client(S3ClientOption option, AWSCredentials credential)
57 | {
58 | S3Config = new AmazonS3Config
59 | {
60 | RegionEndpoint = !string.IsNullOrEmpty(option.Region)
61 | ? RegionEndpoint.GetBySystemName(option.Region)
62 | : defaultEndPoint,
63 | };
64 | TransferConfig = new TransferUtilityConfig
65 | {
66 | MinSizeBeforePartUpload = 1024 * 1024 * 16, // 16MB
67 | ConcurrentServiceRequests = Environment.ProcessorCount * 2,
68 | };
69 | Client = new AmazonS3Client(credential, S3Config);
70 | Transfer = new TransferUtility(Client);
71 | Transfer2 = new TransferUtility(Client, TransferConfig);
72 | Option = option;
73 | }
74 |
75 | // Sync
76 |
77 | ///
78 | /// Synchronize Local files with S3. (Based on Localfiles.)
79 | ///
80 | ///
81 | ///
82 | /// Sample : e => Console.WriteLine($"{e.PercentDone}%, {e.FilePath}, {e}"
83 | ///
84 | public async Task SyncWithLocal(SlimFileInfo[] localFileInfos, string bucketName, string prefix, string ignorePrefix, Action uploadCallback)
85 | {
86 | TimeSpan diffBeforeSyncS3;
87 | TimeSpan diffBeforeSyncLocal;
88 | TimeSpan upload = TimeSpan.Zero;
89 | TimeSpan delete = TimeSpan.Zero;
90 | TimeSpan total;
91 | var sw = Stopwatch.StartNew();
92 |
93 | // Exponential backoff preset
94 | var exponentialBackoff = ExponentialBackoff.Preset.AwsOperation();
95 |
96 | try
97 | {
98 | // Obtain files from S3 bucket
99 | LogTitle("Start : Obtain S3 Items.");
100 | var s3ObjectList = string.IsNullOrEmpty(prefix)
101 | ? await ListAllObjectsAsync(bucketName)
102 | : await ListAllObjectsAsync(bucketName, prefix);
103 | var s3Objects = string.IsNullOrEmpty(ignorePrefix)
104 | ? s3ObjectList.ToS3Objects().ToArray()
105 | : s3ObjectList.ToS3Objects().IgnorePrefix(ignorePrefix).ToArray();
106 | diffBeforeSyncS3 = sw.Elapsed;
107 | Log($"Complete : Obtain S3 Items. {diffBeforeSyncS3.TotalSeconds.ToRound(2)}sec");
108 | sw.Restart();
109 |
110 | // Obtain current diff
111 | LogTitle("Start : Calculate Diff.");
112 | var statuses = GetSysncStatus(localFileInfos, s3Objects, prefix);
113 | diffBeforeSyncLocal = sw.Elapsed;
114 | Log($"Complete : Calculate Diff. {diffBeforeSyncLocal.TotalSeconds.ToRound(2)}sec");
115 | sw.Restart();
116 |
117 | // Diff result
118 | var skipFiles = statuses.Where(x => x.FileSyncStatus == FileSyncStatus.Sync).ToArray();
119 | var newFiles = statuses.Where(x => x.FileSyncStatus == FileSyncStatus.LocalOnly).ToArray();
120 | var updateFiles = statuses.Where(x => x.FileSyncStatus == FileSyncStatus.DiffExists).ToArray();
121 | var removeFiles = statuses.Where(x => x.FileSyncStatus == FileSyncStatus.RemoteOnly).ToArray();
122 | var syncResult = new SynchronizationResult()
123 | {
124 | Skip = skipFiles.Length,
125 | New = newFiles.Length,
126 | Update = updateFiles.Length,
127 | Remove = removeFiles.Length,
128 | DryRun = Option.DryRun,
129 | };
130 |
131 | if (Option.DryRun)
132 | {
133 | // Dry run only lease message
134 | LogTitle($"Skip : Dryrun is enabled. Skip Synchronize with S3. New = {newFiles.Length}, Update = {updateFiles.Length}, Remove = {removeFiles.Length}");
135 | }
136 | else
137 | {
138 | // Upload local files to s3 for diff files
139 | LogTitle($"Start : Upload to S3. New = {newFiles.Length}, Update = {updateFiles.Length})");
140 | await RetryableFileUploadAsync(bucketName, prefix, uploadCallback, exponentialBackoff, newFiles, updateFiles);
141 | upload = sw.Elapsed;
142 | Log($"Complete : Upload to S3. {upload.TotalSeconds.ToRound(2)}sec");
143 | sw.Restart();
144 |
145 | // Remove s3 items for diff item
146 | LogTitle($"Start : Remove item on S3. Remove = {removeFiles.Length}");
147 | if (removeFiles.Any())
148 | {
149 | await RetrybleFileDeleteAsync(bucketName, exponentialBackoff, removeFiles);
150 | }
151 | delete = sw.Elapsed;
152 | Log($"Complete : Remote item on S3. {delete.TotalSeconds.ToRound(2)}sec");
153 | }
154 |
155 | // Obtain sync result
156 | total = diffBeforeSyncS3 + diffBeforeSyncLocal + upload + delete;
157 | Log($@"
158 | ===============================================
159 | Detail Execution Time :
160 | -----------------------------------------------
161 | Obtain S3 Items : {diffBeforeSyncS3.TotalSeconds.ToRound(2)}sec
162 | Calculate Diff : {diffBeforeSyncLocal.TotalSeconds.ToRound(2)}sec
163 | Upload to S3 : {upload.TotalSeconds.ToRound(2)}sec {(Option.DryRun ? "(dry-run. skipped)" : "")}
164 | Delete on S3 : {delete.TotalSeconds.ToRound(2)}sec {(Option.DryRun ? "(dry-run. skipped)" : "")}
165 | -----------------------------------------------
166 | Total Execution : {total.TotalSeconds.ToRound(2)}sec, ({total.TotalMinutes.ToRound(2)}min)
167 | ===============================================");
168 | return syncResult;
169 | }
170 | finally
171 | {
172 | sw.Stop();
173 | sw = null;
174 | }
175 | }
176 |
177 | ///
178 | /// Upload with controling buffer and auto-retry on error.
179 | ///
180 | ///
181 | /// Due to AWS API limitation, it requires manage upload bandwith and error handling.
182 | /// Retry tactics will be following 2 patterns:
183 | /// 1. Continue upload when error happens. (Retry limit : defined retry count = retryLimit)
184 | /// - This will be use because it should not stop on error. Retry following process can continue or not.
185 | /// 2. When (1) successfully complete, include retry, error processing item list will be retry to upload.(Retry limit : defined retry count = retryLimit)
186 | /// - If 1 completed then retry errors again will be nice, isn't it?
187 | ///
188 | ///
189 | ///
190 | ///
191 | ///
192 | ///
193 | ///
194 | ///
195 | private async Task RetryableFileUploadAsync(string bucketName, string prefix, Action uploadCallback, ExponentialBackoff exponentialBackoff, S3FileHashStatus[] newFiles, S3FileHashStatus[] updateFiles)
196 | {
197 | var retryLimit = 5;
198 | var currentRetry = 0;
199 |
200 | var queueList = new List>();
201 | var retryQueueList = new List>();
202 |
203 | // Enqueue for upload planned items. This offers more availability when error happens
204 | foreach (var buffer in newFiles.Concat(updateFiles).Buffer(queueLength))
205 | {
206 | var requestQueue = new Queue(buffer.Length);
207 | foreach (var item in buffer)
208 | {
209 | requestQueue.Enqueue(item);
210 | }
211 | queueList.Add(requestQueue);
212 | }
213 |
214 | // Excute for each queue
215 | var result = await TryRetryableFileUploadAsyncCore(bucketName, prefix, uploadCallback, exponentialBackoff, queueList, retryQueueList);
216 |
217 | // Retry
218 | while (!result.success && retryQueueList.Any())
219 | {
220 | Warn($"Warning: Retrying failed items. {retryQueueList.Sum(x => x.Count)}items");
221 |
222 | // exchange QueueList items
223 | queueList.Clear();
224 | foreach (var retryItem in retryQueueList)
225 | {
226 | queueList.Add(retryItem);
227 | }
228 | retryQueueList.Clear();
229 |
230 | // execute for each queue
231 | result = await TryRetryableFileUploadAsyncCore(bucketName, prefix, uploadCallback, exponentialBackoff, queueList, retryQueueList);
232 |
233 | // increment retry count
234 | currentRetry++;
235 |
236 | // Throw if reached to retry limit.
237 | if (currentRetry >= retryLimit)
238 | {
239 | Error($"Error : Exceeded retry count limit ({currentRetry}/{retryLimit}). Stop execution.");
240 | throw result.exception;
241 | }
242 | }
243 | }
244 |
245 | private async Task<(bool success, AmazonS3Exception exception)> TryRetryableFileUploadAsyncCore(string bucketName, string prefix, Action uploadCallback, ExponentialBackoff exponentialBackoff, List> queueList, List> retryQueueList)
246 | {
247 | // How many times it retry uploading QueueList.
248 | var retryLimit = 5;
249 | var currentRetry = 0;
250 | AmazonS3Exception exception = null;
251 |
252 | foreach (var queue in queueList)
253 | {
254 | try
255 | {
256 | await ConcurretFileUploadAsync(bucketName, queue, prefix, uploadCallback);
257 | Log($"Partial Complete : Upload to S3. ({queue.Count})");
258 | }
259 | catch (AmazonS3Exception ex)
260 | {
261 | exception = ex;
262 | switch (ex.StatusCode)
263 | {
264 | case HttpStatusCode.ServiceUnavailable:
265 | {
266 | // Put error queue list into retry queue list.
267 | retryQueueList.Add(queue);
268 |
269 | // re-throw when retry limit exceeded.
270 | if (currentRetry >= retryLimit)
271 | {
272 | Error($"Error : Exceeded retry count limit ({currentRetry}/{retryLimit}). Stop execution.");
273 | throw ex;
274 | }
275 |
276 | // Request reejected because "Too many Request"? Wait for Exponential Backoff.
277 | // Sample Error :
278 | // (Status Code : 502) Unhandled Exception: Amazon.S3.AmazonS3Exception: Please reduce your request rate. --->Amazon.Runtime.Internal.HttpErrorResponseException: Exception of type 'Amazon.Runtime.Internal.HttpErrorResponseException' was thrown.
279 | var waitTime = exponentialBackoff.GetNextDelay();
280 | Warn($"Warning : Exception happen during upload, re-queue to last then wait {waitTime.TotalSeconds}sec for next retry. Exception count in Queue List ({currentRetry}/{retryLimit}). {ex.GetType().FullName}, {ex.Message}, {ex.StackTrace}");
281 |
282 | // Adjust next retry timing : wait for exponential Backoff
283 | await Task.Delay(waitTime);
284 |
285 | // increment retry count
286 | currentRetry++;
287 |
288 | continue;
289 | }
290 | default:
291 | throw ex;
292 | }
293 | }
294 | }
295 |
296 | return ((exception == null), exception);
297 | }
298 |
299 | ///
300 | /// Delete with controling buffer and auto-retry on error.
301 | ///
302 | ///
303 | /// Due to AWS API limitation, it requires manage delete bandwith and error handling.
304 | /// Retry tactics will be following 2 patterns:
305 | /// 1. Continue delete when error happens. (Retry limit : defined retry count = retryLimit)
306 | /// - This will be use because it should not stop on error. Retry following process can continue or not.
307 | /// 2. When (1) successfully complete, include retry, error processing item list will be retry to delete.(Retry limit : defined retry count = retryLimit)
308 | /// - If 1 completed then retry errors again will be nice, isn't it?
309 | ///
310 | ///
311 | ///
312 | ///
313 | ///
314 | private async Task RetrybleFileDeleteAsync(string bucketName, ExponentialBackoff exponentialBackoff, S3FileHashStatus[] targetFiles)
315 | {
316 | var retryLimit = 5;
317 | var currentRetry = 0;
318 |
319 | var queueList = new List>();
320 | var retryQueueList = new List>();
321 |
322 | // Enqueue for remove planned items. This offers more availability when error happens
323 | foreach (var buffer in targetFiles.Buffer(queueLength))
324 | {
325 | var requestQueue = new Queue(buffer.Length);
326 | foreach (var item in buffer)
327 | {
328 | requestQueue.Enqueue(item);
329 | }
330 | queueList.Add(requestQueue);
331 | }
332 |
333 | // execute for each queue
334 | var result = await TryRetrybleFileDeleteAsyncCore(bucketName, exponentialBackoff, queueList, retryQueueList);
335 |
336 | // Retry
337 | while (!result.success && retryQueueList.Any())
338 | {
339 | Warn($"Warning: Retrying failed items. {retryQueueList.Sum(x => x.Count)}items");
340 |
341 | // exchange QueueList items
342 | queueList.Clear();
343 | foreach (var retryItem in retryQueueList)
344 | {
345 | queueList.Add(retryItem);
346 | }
347 | retryQueueList.Clear();
348 |
349 | // execute for each queue
350 | result = await TryRetrybleFileDeleteAsyncCore(bucketName, exponentialBackoff, queueList, retryQueueList);
351 |
352 | // increment retry count
353 | currentRetry++;
354 |
355 | // Throw if reached to retry limit.
356 | if (currentRetry >= retryLimit)
357 | {
358 | Error($"Error : Exceeded retry count limit ({currentRetry}/{retryLimit}). Stop execution.");
359 | throw result.exception;
360 | }
361 | }
362 | }
363 |
364 | private async Task<(bool success, AmazonS3Exception exception)> TryRetrybleFileDeleteAsyncCore(string bucketName, ExponentialBackoff exponentialBackoff, List> queueList, List> retryQueueList)
365 | {
366 | // How many times it retry uploading QueueList.
367 | var retryLimit = 5;
368 | var currentRetry = 0;
369 | AmazonS3Exception exception = null;
370 |
371 | foreach (var queue in queueList)
372 | {
373 | try
374 | {
375 | await ConcurrentFileDeleteAsync(bucketName, queue);
376 | Log($"Partial Complete : Delete to S3. ({queue.Count})");
377 | }
378 | catch (AmazonS3Exception ex)
379 | {
380 | exception = ex;
381 | switch (ex.StatusCode)
382 | {
383 | case HttpStatusCode.ServiceUnavailable:
384 | {
385 | // Put error queue list into retry queue list.
386 | retryQueueList.Add(queue);
387 |
388 | // re-throw when retry limit exceeded.
389 | if (currentRetry >= retryLimit)
390 | {
391 | Error($"Error : Exceeded retry count limit ({currentRetry}/{retryLimit}). Stop execution.");
392 | throw ex;
393 | }
394 |
395 | // Request reejected because "Too many Request"? Wait for Exponential Backoff.
396 | // Sample : (Status Code : 502) Unhandled Exception: Amazon.S3.AmazonS3Exception: Please reduce your request rate. --->Amazon.Runtime.Internal.HttpErrorResponseException: Exception of type 'Amazon.Runtime.Internal.HttpErrorResponseException' was thrown.
397 | var waitTime = exponentialBackoff.GetNextDelay();
398 | Warn($"Warning : Exception happen during delete, re-queue to last then wait {waitTime.TotalSeconds}sec for next retry. Exception count in Queue List ({currentRetry}/{retryLimit}). {ex.GetType().FullName}, {ex.Message}, {ex.StackTrace}");
399 |
400 | // Adjust next retry timing : wait for exponential Backoff
401 | await Task.Delay(waitTime);
402 |
403 | // increment retry count
404 | currentRetry++;
405 |
406 | continue;
407 | }
408 | default:
409 | throw ex;
410 | }
411 | }
412 | }
413 |
414 | return ((exception == null), exception);
415 | }
416 |
417 | // List
418 |
419 | public async Task> ListAllObjectsAsync(string bucketName)
420 | {
421 | var list = new List();
422 | var res = await Client.ListObjectsV2Async(new ListObjectsV2Request
423 | {
424 | BucketName = bucketName,
425 | MaxKeys = int.MaxValue,
426 | });
427 | if (res != null)
428 | {
429 | list.Add(res);
430 | }
431 |
432 | while (!string.IsNullOrEmpty(res.NextContinuationToken))
433 | {
434 | res = await Client.ListObjectsV2Async(new ListObjectsV2Request
435 | {
436 | BucketName = bucketName,
437 | MaxKeys = int.MaxValue,
438 | ContinuationToken = res.NextContinuationToken,
439 | });
440 | if (res != null)
441 | {
442 | list.Add(res);
443 | }
444 | }
445 |
446 | return list;
447 | }
448 |
449 | public async Task> ListAllObjectsAsync(string bucketName, string prefix)
450 | {
451 | var list = new List();
452 | var res = await Client.ListObjectsV2Async(new ListObjectsV2Request
453 | {
454 | BucketName = bucketName,
455 | MaxKeys = int.MaxValue,
456 | Prefix = prefix,
457 | });
458 | if (res != null)
459 | {
460 | list.Add(res);
461 | }
462 |
463 | while (!string.IsNullOrEmpty(res.NextContinuationToken))
464 | {
465 | res = await Client.ListObjectsV2Async(new ListObjectsV2Request
466 | {
467 | BucketName = bucketName,
468 | MaxKeys = int.MaxValue,
469 | Prefix = prefix,
470 | ContinuationToken = res.NextContinuationToken,
471 | });
472 | if (res != null)
473 | {
474 | list.Add(res);
475 | }
476 | }
477 |
478 | return list;
479 | }
480 |
481 | public async Task> ListAllObjectsAsync(ListObjectsV2Request request)
482 | {
483 | var list = new List();
484 | var res = await ListObjectsAsync(request);
485 | if (res != null)
486 | {
487 | list.Add(res);
488 | }
489 |
490 | while (!string.IsNullOrEmpty(res.NextContinuationToken))
491 | {
492 | res = await ListObjectsAsync(request, res.NextContinuationToken);
493 | if (res != null)
494 | {
495 | list.Add(res);
496 | }
497 | }
498 |
499 | return list;
500 | }
501 |
502 | public async Task ListObjectsAsync(ListObjectsV2Request request)
503 | {
504 | var res = await Client.ListObjectsV2Async(request);
505 | return res;
506 | }
507 | public async Task ListObjectsAsync(ListObjectsV2Request request, string continuationToken)
508 | {
509 | request.ContinuationToken = continuationToken;
510 | var res = await Client.ListObjectsV2Async(request);
511 | return res;
512 | }
513 |
514 | // Get
515 |
516 | public async Task GetObjectsAsync(string bucketName, string keyName)
517 | {
518 | var res = await Client.GetObjectAsync(bucketName, keyName);
519 | return res;
520 | }
521 |
522 | public async Task GetMetaAsync(string bucketName, string keyName)
523 | {
524 | var res = await Client.GetObjectMetadataAsync(bucketName, keyName);
525 | return res;
526 | }
527 |
528 | // Upload
529 |
530 | public async Task ConcurretFileUploadAsync(string bucketName, IEnumerable targetFiles, Action uploadProgressEventAction)
531 | {
532 | var tasks = targetFiles.Select(async x =>
533 | {
534 | var request = new TransferUtilityUploadRequest
535 | {
536 | BucketName = bucketName,
537 | FilePath = x.FileInfo.Value.FullPath,
538 | Key = x.FileInfo.Value.MultiplatformRelativePath,
539 | PartSize = TransferConfig.MinSizeBeforePartUpload,
540 | StorageClass = S3StorageClass.Standard,
541 | };
542 | if (!string.IsNullOrEmpty(Option.ContentType))
543 | {
544 | request.ContentType = Option.ContentType;
545 | }
546 |
547 | if (uploadProgressEventAction != null)
548 | {
549 | request.UploadProgressEvent += (sender, e) =>
550 | {
551 | uploadProgressEventAction(e);
552 | };
553 | }
554 |
555 | await Transfer.UploadAsync(request);
556 | });
557 | await Task.WhenAll(tasks);
558 | }
559 |
560 | public async Task ConcurretFileUploadAsync(string bucketName, IEnumerable targetFiles, string prefix, Action uploadProgressEventAction)
561 | {
562 | var tasks = targetFiles.Select(async x =>
563 | {
564 | var request = new TransferUtilityUploadRequest
565 | {
566 | BucketName = bucketName,
567 | FilePath = x.FileInfo.Value.FullPath,
568 | Key = GetS3Key(prefix, x.FileInfo.Value.MultiplatformRelativePath),
569 | PartSize = TransferConfig.MinSizeBeforePartUpload,
570 | StorageClass = S3StorageClass.Standard,
571 | };
572 | if (!string.IsNullOrEmpty(Option.ContentType))
573 | {
574 | request.ContentType = Option.ContentType;
575 | }
576 |
577 | if (uploadProgressEventAction != null)
578 | {
579 | request.UploadProgressEvent += (sender, e) =>
580 | {
581 | uploadProgressEventAction(e);
582 | };
583 | }
584 |
585 | await Transfer.UploadAsync(request);
586 | });
587 | await Task.WhenAll(tasks);
588 | }
589 |
590 | // Delete
591 |
592 | public async Task ConcurrentFileDeleteAsync(string bucketName, IEnumerable targetFiles)
593 | {
594 | var tasks = targetFiles.Select(async x =>
595 | {
596 | var request = new DeleteObjectRequest
597 | {
598 | BucketName = bucketName,
599 | Key = x.S3Object.Key,
600 | };
601 |
602 | await Client.DeleteObjectAsync(request);
603 | });
604 | await Task.WhenAll(tasks);
605 | }
606 |
607 | public async Task ConcurrentFileDeleteAsync(string bucketName, string prefix, IEnumerable targetFiles)
608 | {
609 | var tasks = targetFiles.Select(async x =>
610 | {
611 | var request = new DeleteObjectRequest
612 | {
613 | BucketName = bucketName,
614 | Key = GetS3Key(prefix, x.S3Object.Key),
615 | };
616 | await Client.DeleteObjectAsync(request);
617 | });
618 | await Task.WhenAll(tasks);
619 | }
620 |
621 | // Diff
622 |
623 | public S3FileHashStatus[] GetSysncStatus(IEnumerable localFiles, IEnumerable s3Objects, string prefix)
624 | {
625 | if (localFiles == null) throw new ArgumentNullException(nameof(localFiles));
626 | if (s3Objects == null) throw new ArgumentNullException(nameof(s3Objects));
627 |
628 | // Dictionary for Remote S3 and Local File
629 | var s3Dictionary = s3Objects.ToDictionary(x => x.Key, x => x);
630 | var localDictionary = localFiles.ToDictionary(x => GetS3Key(prefix, x.MultiplatformRelativePath), x => x);
631 |
632 | // Get State for Local files
633 | S3FileHashStatus[] statuses = null;
634 | var localExists = localDictionary.Select(x =>
635 | {
636 | int chunkSize = 0;
637 | string fileETag = "";
638 | if (s3Dictionary.TryGetValue(x.Key, out S3Object s3Object))
639 | {
640 | var bytes = FileHashHelper.GetFileBinary(x.Value.FullPath);
641 | var s3ETagChunkCount = s3Object.GetETagChunkCount();
642 | chunkSize = FileHashHelper.GetChunkSize(bytes.Length, s3ETagChunkCount);
643 | fileETag = FileHashHelper.CalculateEtag(bytes, chunkSize);
644 | }
645 |
646 | return new S3FileHashStatus(x.Value, fileETag, chunkSize, s3Object);
647 | })
648 | .ToArray();
649 |
650 | // Get State for Remote S3
651 | var remoteOnly = s3Objects
652 | .Where(x => !localDictionary.TryGetValue(x.Key, out var slimFileInfo))
653 | .Select(x => new S3FileHashStatus(null, "", 0, x))
654 | .ToArray();
655 |
656 | // Concat local and remote
657 | statuses = localExists.Concat(remoteOnly).ToArray();
658 |
659 | return statuses;
660 | }
661 |
662 | public void Dispose()
663 | {
664 | Client?.Dispose();
665 | Transfer?.Dispose();
666 | }
667 |
668 | // Helper
669 |
670 | private static string GetS3Key(string keyPrefix, string key)
671 | {
672 | return string.IsNullOrEmpty(keyPrefix) ? key : $"{keyPrefix}/{key}";
673 | }
674 |
675 | // Logger
676 |
677 | public static void Error(string text)
678 | {
679 | Console.Error.WriteLine(text);
680 | }
681 |
682 | public static void Warn(string text)
683 | {
684 | Log(text, ConsoleColor.DarkYellow);
685 | }
686 |
687 | private static void LogTitle(string text)
688 | {
689 | Log($@"
690 | -----------------------------------------------
691 | {text}
692 | -----------------------------------------------", ConsoleColor.White);
693 | }
694 |
695 | public static void Log(string text)
696 | {
697 | Log(text, ConsoleColor.DarkGray);
698 | }
699 |
700 | private static void Log(string text, ConsoleColor color)
701 | {
702 | lock (typeof(S3Client))
703 | {
704 | var oldColor = Console.ForegroundColor;
705 | if (oldColor != color)
706 | {
707 | Console.ForegroundColor = color;
708 | }
709 |
710 | Console.WriteLine(text);
711 |
712 | if (oldColor != color)
713 | {
714 | Console.ForegroundColor = oldColor;
715 | }
716 | }
717 | }
718 | }
719 | }
720 |
--------------------------------------------------------------------------------