├── .blazar-enabled ├── slimfast-plugin ├── src │ └── main │ │ └── java │ │ └── com │ │ └── hubspot │ │ └── maven │ │ └── plugins │ │ └── slimfast │ │ ├── ArtifactWrapper.java │ │ ├── FileUploader.java │ │ ├── FileDownloader.java │ │ ├── LocalArtifact.java │ │ ├── DryRunFileUploader.java │ │ ├── S3ArtifactWrapper.java │ │ ├── ClasspathConfiguration.java │ │ ├── LocalArtifactWrapper.java │ │ ├── PreparedArtifact.java │ │ ├── UploadJarsMojo.java │ │ ├── PreparedArtifactWrapper.java │ │ ├── DownloadConfiguration.java │ │ ├── UploadFromManifestMojo.java │ │ ├── S3Configuration.java │ │ ├── UploadConfiguration.java │ │ ├── FileHelper.java │ │ ├── S3Factory.java │ │ ├── S3Artifact.java │ │ ├── CopyJarsMojo.java │ │ ├── WriteManifestMojo.java │ │ ├── BaseFileUploader.java │ │ ├── DownloadJarsMojo.java │ │ ├── DefaultFileUploader.java │ │ ├── BaseUploadMojo.java │ │ ├── JsonHelper.java │ │ ├── DefaultFileDownloader.java │ │ └── ArtifactHelper.java ├── pom.xml └── README.md ├── README.md ├── slimfast-hadoop ├── pom.xml ├── README.md └── src │ └── main │ └── java │ └── com │ └── hubspot │ └── slimfast │ └── hadoop │ ├── SlimfastHadoopConfiguration.java │ └── HadoopHelper.java ├── pom.xml └── LICENSE.md /.blazar-enabled: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/ArtifactWrapper.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.util.Set; 5 | 6 | public interface ArtifactWrapper { 7 | Path getPrefix(); 8 | 9 | Set getLocalArtifacts(); 10 | } 11 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/FileUploader.java: 
--------------------------------------------------------------------------------
package com.hubspot.maven.plugins.slimfast;

import java.io.Closeable;
import java.util.Set;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;

/**
 * Uploads a project's local build artifacts to remote storage and reports the
 * resulting remote (S3) coordinates.
 *
 * <p>Implementations are initialized once via {@link #init(UploadConfiguration)}
 * before any upload and closed when the mojo finishes.
 */
public interface FileUploader extends Closeable {
  /** Supplies the upload settings; called exactly once before {@link #upload(Set)}. */
  void init(UploadConfiguration config);

  /**
   * Uploads the given local artifacts.
   *
   * @param artifacts the local jars to upload
   * @return the uploaded artifacts with their remote S3 coordinates
   * @throws MojoExecutionException if an upload fails
   * @throws MojoFailureException if the build should be failed
   */
  // NOTE(review): type parameters restored from BaseFileUploader, which maps
  // Set<LocalArtifact> to Set<S3Artifact>; the export had stripped all generics.
  Set<S3Artifact> upload(Set<LocalArtifact> artifacts)
    throws MojoExecutionException, MojoFailureException;
}
--------------------------------------------------------------------------------
/slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/FileDownloader.java:
--------------------------------------------------------------------------------
package com.hubspot.maven.plugins.slimfast;

import java.io.Closeable;
import java.util.Set;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;

/**
 * Downloads previously uploaded artifacts back to the local filesystem.
 */
public interface FileDownloader extends Closeable {
  /** Supplies the download settings; called once before {@link #download(Set)}. */
  void init(DownloadConfiguration config)
    throws MojoExecutionException, MojoFailureException;

  /**
   * Downloads the given remote artifacts.
   *
   * @param artifact the remote artifacts to fetch
   */
  // NOTE(review): element type assumed to be S3Artifact to match the upload
  // manifest (S3ArtifactWrapper holds Set<S3Artifact>) — confirm against
  // DefaultFileDownloader, which is outside this chunk.
  void download(Set<S3Artifact> artifact)
    throws MojoExecutionException, MojoFailureException;
}
--------------------------------------------------------------------------------
/slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/LocalArtifact.java:
--------------------------------------------------------------------------------
package com.hubspot.maven.plugins.slimfast;

import java.nio.file.Path;

/**
 * Pairs an artifact's location on the local filesystem with the relative path
 * it should occupy in the deployed layout.
 */
public class LocalArtifact {

  private final Path localPath;
  private final Path targetPath;

  public LocalArtifact(Path localPath, Path targetPath) {
    this.localPath = localPath;
    this.targetPath = targetPath;
  }

  /** @return where the artifact currently lives on disk */
  public Path getLocalPath() {
    return localPath;
  }

  /** @return the relative path the artifact should end up at after deploy */
  public Path getTargetPath() {
    return targetPath;
  }
}
-------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/DryRunFileUploader.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.io.IOException; 4 | import java.util.Set; 5 | import java.util.concurrent.ExecutionException; 6 | import java.util.concurrent.TimeoutException; 7 | 8 | public class DryRunFileUploader extends BaseFileUploader { 9 | 10 | @Override 11 | protected void doUpload(Set artifacts) 12 | throws ExecutionException, InterruptedException, TimeoutException {} 13 | 14 | @Override 15 | public void close() throws IOException {} 16 | } 17 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/S3ArtifactWrapper.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.util.Set; 4 | 5 | public class S3ArtifactWrapper { 6 | 7 | private final String prefix; 8 | private final Set artifacts; 9 | 10 | public S3ArtifactWrapper(String prefix, Set artifacts) { 11 | this.prefix = prefix; 12 | this.artifacts = artifacts; 13 | } 14 | 15 | public String getPrefix() { 16 | return prefix; 17 | } 18 | 19 | public Set getArtifacts() { 20 | return artifacts; 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/ClasspathConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.util.Set; 5 | 6 | public class ClasspathConfiguration { 7 | 8 | private final Path prefix; 9 | private final Set entries; 10 | 11 | public ClasspathConfiguration(Path prefix, 
Set entries) { 12 | this.prefix = prefix; 13 | this.entries = entries; 14 | } 15 | 16 | public Path getPrefix() { 17 | return prefix; 18 | } 19 | 20 | public Set getEntries() { 21 | return entries; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/LocalArtifactWrapper.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.util.Set; 5 | 6 | public class LocalArtifactWrapper implements ArtifactWrapper { 7 | 8 | private final Path prefix; 9 | private final Set artifacts; 10 | 11 | public LocalArtifactWrapper(Path prefix, Set artifacts) { 12 | this.prefix = prefix; 13 | this.artifacts = artifacts; 14 | } 15 | 16 | public Path getPrefix() { 17 | return prefix; 18 | } 19 | 20 | public Set getArtifacts() { 21 | return artifacts; 22 | } 23 | 24 | @Override 25 | public Set getLocalArtifacts() { 26 | return artifacts; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SlimFast (unrelated to the delicious beverage) 2 | 3 | ## Overview ## 4 | 5 | SlimFast is a tool for Java apps to help them stop building fat jars for deployment (massive jars containing 6 | all of the app's dependencies). Building fat jars is slow and adds a lot of complexity (subtle or not so subtle 7 | bugs can occur when jars being merged have duplicate files for example). 8 | 9 | The first part is the maven plugin, which can be used in place of the maven-assembly-plugin or maven-shade-plugin 10 | (which are often used to build fat jars). The other part is a helper library for hadoop to write each job's dependencies 11 | to hdfs and add them to the job's classpath. 
12 | 13 | ## Usage ## 14 | 15 | See [here](slimfast-plugin/README.md) for usage instructions for the maven plugin or [here](slimfast-hadoop/README.md) for the hadoop library. 16 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/PreparedArtifact.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.nio.file.Paths; 5 | 6 | public class PreparedArtifact { 7 | 8 | private final String localPath; 9 | private final String targetPath; 10 | 11 | public PreparedArtifact(String localPath, String targetPath) { 12 | this.localPath = localPath; 13 | this.targetPath = targetPath; 14 | } 15 | 16 | public String getLocalPath() { 17 | return localPath; 18 | } 19 | 20 | public String getTargetPath() { 21 | return targetPath; 22 | } 23 | 24 | public LocalArtifact toLocalArtifact() { 25 | Path targetPath = Paths.get(getTargetPath()); 26 | Path localPath = Paths.get(getLocalPath()); 27 | 28 | return new LocalArtifact(localPath, targetPath); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/UploadJarsMojo.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import javax.inject.Inject; 4 | import org.apache.maven.plugin.MojoExecutionException; 5 | import org.apache.maven.plugins.annotations.LifecyclePhase; 6 | import org.apache.maven.plugins.annotations.Mojo; 7 | import org.apache.maven.plugins.annotations.ResolutionScope; 8 | 9 | @Mojo( 10 | name = "upload", 11 | defaultPhase = LifecyclePhase.DEPLOY, 12 | threadSafe = true, 13 | requiresDependencyResolution = ResolutionScope.RUNTIME 14 | ) 15 | public class UploadJarsMojo extends BaseUploadMojo { 16 | 17 | 
@Inject 18 | private ArtifactHelper artifactHelper; 19 | 20 | @Override 21 | protected ArtifactWrapper getArtifactWrapper() throws MojoExecutionException { 22 | return artifactHelper.getArtifactPaths(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /slimfast-hadoop/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | 6 | com.hubspot.slimfast 7 | slimfast-parent 8 | 1.0.5-SNAPSHOT 9 | 10 | 11 | slimfast-hadoop 12 | 13 | ${project.groupId}:${project.artifactId} 14 | 15 | 16 | 17 | org.slf4j 18 | slf4j-api 19 | 20 | 21 | org.apache.hadoop 22 | hadoop-common 23 | provided 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/PreparedArtifactWrapper.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.util.LinkedHashSet; 5 | import java.util.Set; 6 | import java.util.stream.Collectors; 7 | 8 | public class PreparedArtifactWrapper implements ArtifactWrapper { 9 | 10 | private final Path prefix; 11 | private final Set artifacts; 12 | 13 | public PreparedArtifactWrapper(Path prefix, Set artifacts) { 14 | this.prefix = prefix; 15 | this.artifacts = artifacts; 16 | } 17 | 18 | public Path getPrefix() { 19 | return prefix; 20 | } 21 | 22 | public Set getArtifacts() { 23 | return artifacts; 24 | } 25 | 26 | @Override 27 | public Set getLocalArtifacts() { 28 | return artifacts == null 29 | ? 
Set.of() 30 | : artifacts 31 | .stream() 32 | .map(PreparedArtifact::toLocalArtifact) 33 | .collect(Collectors.toCollection(LinkedHashSet::new)); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/DownloadConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | 5 | public class DownloadConfiguration { 6 | 7 | private final Path prefix; 8 | private final Path cacheDirectory; 9 | private final Path outputDirectory; 10 | 11 | private final S3Configuration s3Configuration; 12 | 13 | public DownloadConfiguration( 14 | S3Configuration s3Configuration, 15 | Path prefix, 16 | Path cacheDirectory, 17 | Path outputDirectory 18 | ) { 19 | this.s3Configuration = s3Configuration; 20 | this.prefix = prefix; 21 | this.cacheDirectory = cacheDirectory; 22 | this.outputDirectory = outputDirectory; 23 | } 24 | 25 | public Path getPrefix() { 26 | return prefix; 27 | } 28 | 29 | public Path getCacheDirectory() { 30 | return cacheDirectory; 31 | } 32 | 33 | public Path getOutputDirectory() { 34 | return outputDirectory; 35 | } 36 | 37 | public S3Configuration getS3Configuration() { 38 | return s3Configuration; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/UploadFromManifestMojo.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.io.IOException; 4 | import java.nio.file.Paths; 5 | import org.apache.maven.plugin.MojoExecutionException; 6 | import org.apache.maven.plugins.annotations.Mojo; 7 | import org.apache.maven.plugins.annotations.Parameter; 8 | import org.apache.maven.plugins.annotations.ResolutionScope; 9 | 10 | @Mojo( 11 
| name = "upload-from-manifest", 12 | threadSafe = true, 13 | requiresDependencyResolution = ResolutionScope.RUNTIME 14 | ) 15 | public class UploadFromManifestMojo extends BaseUploadMojo { 16 | 17 | @Parameter( 18 | property = "slimfast.manifestFile", 19 | defaultValue = "${project.build.directory}/slimfast-local.json" 20 | ) 21 | private String manifestFile; 22 | 23 | @Override 24 | protected ArtifactWrapper getArtifactWrapper() throws MojoExecutionException { 25 | try { 26 | return JsonHelper.readPreparedArtifactsFromJson(Paths.get(manifestFile).toFile()); 27 | } catch (IOException e) { 28 | throw new MojoExecutionException("Failed to read manifest file", e); 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/S3Configuration.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.util.Optional; 4 | import software.amazon.awssdk.regions.Region; 5 | 6 | public class S3Configuration { 7 | 8 | private final Optional accessKey; 9 | private final Optional secretKey; 10 | 11 | private final Optional region; 12 | private final Optional targetThroughputGbps; 13 | private final Optional minPartSizeBytes; 14 | 15 | public S3Configuration( 16 | Optional accessKey, 17 | Optional secretKey, 18 | Optional region, 19 | Optional targetThroughputGbps, 20 | Optional minPartSizeBytes 21 | ) { 22 | this.accessKey = accessKey; 23 | this.secretKey = secretKey; 24 | this.region = region; 25 | this.targetThroughputGbps = targetThroughputGbps; 26 | this.minPartSizeBytes = minPartSizeBytes; 27 | } 28 | 29 | public Optional getAccessKey() { 30 | return accessKey; 31 | } 32 | 33 | public Optional getSecretKey() { 34 | return secretKey; 35 | } 36 | 37 | public Optional getRegion() { 38 | return region; 39 | } 40 | 41 | public Optional getTargetThroughputGbps() { 42 | return 
targetThroughputGbps; 43 | } 44 | 45 | public Optional getMinPartSizeBytes() { 46 | return minPartSizeBytes; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/UploadConfiguration.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | 5 | public class UploadConfiguration { 6 | 7 | private final Path prefix; 8 | private final String s3Bucket; 9 | private final String s3ArtifactRoot; 10 | private final Path outputFile; 11 | private final boolean allowUnresolvedSnapshots; 12 | 13 | private final S3Configuration s3Configuration; 14 | 15 | public UploadConfiguration( 16 | S3Configuration s3Configuration, 17 | Path prefix, 18 | String s3Bucket, 19 | String s3ArtifactRoot, 20 | Path outputFile, 21 | boolean allowUnresolvedSnapshots 22 | ) { 23 | this.s3Configuration = s3Configuration; 24 | this.prefix = prefix; 25 | this.s3Bucket = s3Bucket; 26 | this.s3ArtifactRoot = s3ArtifactRoot; 27 | this.outputFile = outputFile; 28 | this.allowUnresolvedSnapshots = allowUnresolvedSnapshots; 29 | } 30 | 31 | public Path getPrefix() { 32 | return prefix; 33 | } 34 | 35 | public S3Configuration getS3Configuration() { 36 | return s3Configuration; 37 | } 38 | 39 | public String getS3Bucket() { 40 | return s3Bucket; 41 | } 42 | 43 | public String getS3ArtifactRoot() { 44 | return s3ArtifactRoot; 45 | } 46 | 47 | public Path getOutputFile() { 48 | return outputFile; 49 | } 50 | 51 | public boolean isAllowUnresolvedSnapshots() { 52 | return allowUnresolvedSnapshots; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/FileHelper.java: -------------------------------------------------------------------------------- 1 | package 
com.hubspot.maven.plugins.slimfast; 2 | 3 | import com.google.common.hash.Hashing; 4 | import java.io.IOException; 5 | import java.io.UncheckedIOException; 6 | import java.nio.file.Files; 7 | import java.nio.file.Path; 8 | import java.nio.file.StandardCopyOption; 9 | import javax.annotation.Nullable; 10 | 11 | public class FileHelper { 12 | 13 | public static void ensureDirectoryExists(@Nullable Path path) { 14 | if (path != null && !Files.exists(path)) { 15 | try { 16 | Files.createDirectories(path); 17 | } catch (IOException e) { 18 | throw new UncheckedIOException( 19 | "Error creating parent directories for path " + path, 20 | e 21 | ); 22 | } 23 | } 24 | } 25 | 26 | public static void atomicMove(Path sourcePath, Path destPath) { 27 | try { 28 | Files.move(sourcePath, destPath, StandardCopyOption.ATOMIC_MOVE); 29 | } catch (IOException e) { 30 | throw new UncheckedIOException(e); 31 | } 32 | } 33 | 34 | public static String md5(Path path) { 35 | try { 36 | return com.google.common.io.Files.hash(path.toFile(), Hashing.md5()).toString(); 37 | } catch (IOException e) { 38 | throw new UncheckedIOException("Error reading file at path: " + path, e); 39 | } 40 | } 41 | 42 | public static long size(Path path) { 43 | try { 44 | return Files.size(path); 45 | } catch (IOException e) { 46 | throw new UncheckedIOException("Error reading file at path: " + path, e); 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /slimfast-hadoop/README.md: -------------------------------------------------------------------------------- 1 | # slimfast-hadoop 2 | 3 | ## Overview ## 4 | 5 | One place where fat jars can be convenient is for frameworks like Hadoop that ship your application jar around. 6 | If you build a fat jar, your dependencies automatically go along for the ride and everything works great. 
If 7 | you stop building a fat jar, however, you need to handle this manually so that your dependencies are available 8 | when the application starts up on the other side. 9 | 10 | This microlibrary makes that easier by finding all of the dependency jars, writing them to HDFS (if they don't 11 | already exist), and adding them to the job's classpath so everything should work transparently. Because it only 12 | writes jars to HDFS that don't already exist, after your job runs the first time this step should usually be a 13 | no-op. And because the job jar is so much smaller, we found that switching away from fat jars drastically 14 | improved our Hadoop job launch speeds. 15 | 16 | ## Usage ## 17 | 18 | First you build a `SlimfastHadoopConfiguration`. This contains the path to the main application jar, the root 19 | folder on HDFS you want to use to store dependency jars (defaults to `jars` if not specified), and the Hadoop 20 | `Configuration`. Then you pass this configuration to the `HadoopHelper`, which finds all the dependency jars, 21 | writes them to HDFS, and adds them to the job's classpath.
A minimal invocation would look like: 22 | 23 | ```java 24 | SlimfastHadoopConfiguration slimfastConfiguration = SlimfastHadoopConfiguration.newBuilder() 25 | .setJarByClass(MyJob.class) 26 | .setConfiguration(configuration) 27 | .build(); 28 | 29 | HadoopHelper.writeJarsToHdfsAndAddToClasspath(slimfastConfiguration); 30 | ``` 31 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/S3Factory.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; 4 | import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; 5 | import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; 6 | import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider; 7 | import software.amazon.awssdk.services.s3.S3AsyncClient; 8 | import software.amazon.awssdk.services.s3.S3CrtAsyncClientBuilder; 9 | import software.amazon.awssdk.transfer.s3.S3TransferManager; 10 | 11 | public class S3Factory { 12 | 13 | @SuppressFBWarnings("CT_CONSTRUCTOR_THROW") 14 | private S3Factory() { 15 | throw new AssertionError(); 16 | } 17 | 18 | public static S3AsyncClient createS3AsyncClient(S3Configuration config) { 19 | AwsCredentialsProvider credentialsProvider = null; 20 | if (config.getAccessKey().isPresent() && config.getSecretKey().isPresent()) { 21 | credentialsProvider = 22 | StaticCredentialsProvider.create( 23 | AwsBasicCredentials.create( 24 | config.getAccessKey().get(), 25 | config.getSecretKey().get() 26 | ) 27 | ); 28 | } 29 | 30 | S3CrtAsyncClientBuilder clientBuilder = S3AsyncClient 31 | .crtBuilder() 32 | .credentialsProvider(credentialsProvider); 33 | 34 | config.getRegion().ifPresent(clientBuilder::region); 35 | config.getTargetThroughputGbps().ifPresent(clientBuilder::targetThroughputInGbps); 36 | 
config.getMinPartSizeBytes().ifPresent(clientBuilder::minimumPartSizeInBytes); 37 | 38 | return clientBuilder.build(); 39 | } 40 | 41 | public static S3TransferManager createTransferManager(S3AsyncClient s3AsyncClient) { 42 | return S3TransferManager.builder().s3Client(s3AsyncClient).build(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/S3Artifact.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.nio.file.Path; 4 | import java.util.Objects; 5 | import java.util.Optional; 6 | 7 | public class S3Artifact { 8 | 9 | private final String bucket; 10 | private final String key; 11 | private final Optional localPath; 12 | private final String targetPath; 13 | private final String md5; 14 | private final long size; 15 | 16 | public S3Artifact(String bucket, String key, String targetPath, String md5, long size) { 17 | this(bucket, key, null, targetPath, md5, size); 18 | } 19 | 20 | public S3Artifact( 21 | String bucket, 22 | String key, 23 | Path localPath, 24 | String targetPath, 25 | String md5, 26 | long size 27 | ) { 28 | this.bucket = bucket; 29 | this.key = key; 30 | this.localPath = Optional.ofNullable(localPath); 31 | this.targetPath = targetPath; 32 | this.md5 = md5; 33 | this.size = size; 34 | } 35 | 36 | public String getBucket() { 37 | return bucket; 38 | } 39 | 40 | public String getKey() { 41 | return key; 42 | } 43 | 44 | public Optional getLocalPath() { 45 | return localPath; 46 | } 47 | 48 | public String getTargetPath() { 49 | return targetPath; 50 | } 51 | 52 | public String getMd5() { 53 | return md5; 54 | } 55 | 56 | public long getSize() { 57 | return size; 58 | } 59 | 60 | @Override 61 | public boolean equals(Object o) { 62 | if (this == o) { 63 | return true; 64 | } 65 | 66 | if (o == null || getClass() != o.getClass()) { 67 | 
return false; 68 | } 69 | 70 | S3Artifact artifact = (S3Artifact) o; 71 | return ( 72 | Objects.equals(size, artifact.size) && 73 | Objects.equals(bucket, artifact.bucket) && 74 | Objects.equals(key, artifact.key) && 75 | Objects.equals(targetPath, artifact.targetPath) && 76 | Objects.equals(md5, artifact.md5) 77 | ); 78 | } 79 | 80 | @Override 81 | public int hashCode() { 82 | return Objects.hash(bucket, key, targetPath, md5, size); 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/CopyJarsMojo.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.io.IOException; 4 | import java.nio.file.Files; 5 | import java.nio.file.Path; 6 | import java.nio.file.Paths; 7 | import java.nio.file.StandardCopyOption; 8 | import javax.inject.Inject; 9 | import org.apache.maven.plugin.AbstractMojo; 10 | import org.apache.maven.plugin.MojoExecutionException; 11 | import org.apache.maven.plugin.MojoFailureException; 12 | import org.apache.maven.plugins.annotations.LifecyclePhase; 13 | import org.apache.maven.plugins.annotations.Mojo; 14 | import org.apache.maven.plugins.annotations.Parameter; 15 | import org.apache.maven.plugins.annotations.ResolutionScope; 16 | 17 | @Mojo( 18 | name = "copy", 19 | defaultPhase = LifecyclePhase.PACKAGE, 20 | threadSafe = true, 21 | requiresDependencyResolution = ResolutionScope.RUNTIME 22 | ) 23 | public class CopyJarsMojo extends AbstractMojo { 24 | 25 | @Inject 26 | private ArtifactHelper artifactHelper; 27 | 28 | @Parameter(property = "slimfast.outputDirectory", defaultValue = "${basedir}") 29 | private String outputDirectory; 30 | 31 | @Parameter(property = "slimfast.plugin.skip", defaultValue = "false") 32 | private boolean skip; 33 | 34 | @Override 35 | public void execute() throws MojoExecutionException, MojoFailureException { 36 | 
if (skip) { 37 | getLog().info("Skipping plugin execution"); 38 | return; 39 | } 40 | 41 | LocalArtifactWrapper artifactWrapper = artifactHelper.getArtifactPaths(); 42 | Path prefix = artifactWrapper.getPrefix(); 43 | 44 | for (LocalArtifact artifact : artifactWrapper.getArtifacts()) { 45 | Path localPath = artifact.getLocalPath(); 46 | Path targetPath = Paths 47 | .get(outputDirectory) 48 | .resolve(prefix) 49 | .resolve(artifact.getTargetPath()); 50 | FileHelper.ensureDirectoryExists(targetPath.getParent()); 51 | 52 | try { 53 | Files.copy(localPath, targetPath, StandardCopyOption.REPLACE_EXISTING); 54 | } catch (IOException e) { 55 | throw new MojoFailureException( 56 | String.format("Error moving file from %s to %s", localPath, targetPath), 57 | e 58 | ); 59 | } 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/WriteManifestMojo.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.io.IOException; 4 | import java.nio.file.Path; 5 | import java.nio.file.Paths; 6 | import java.util.HashSet; 7 | import java.util.Set; 8 | import javax.inject.Inject; 9 | import org.apache.maven.plugin.AbstractMojo; 10 | import org.apache.maven.plugin.MojoExecutionException; 11 | import org.apache.maven.plugin.MojoFailureException; 12 | import org.apache.maven.plugins.annotations.Mojo; 13 | import org.apache.maven.plugins.annotations.Parameter; 14 | import org.apache.maven.plugins.annotations.ResolutionScope; 15 | 16 | @Mojo( 17 | name = "write-manifest", 18 | threadSafe = true, 19 | requiresDependencyResolution = ResolutionScope.RUNTIME 20 | ) 21 | public class WriteManifestMojo extends AbstractMojo { 22 | 23 | @Inject 24 | private ArtifactHelper artifactHelper; 25 | 26 | @Parameter(property = "slimfast.plugin.skip", defaultValue = "false") 27 | private boolean 
skip; 28 | 29 | @Parameter( 30 | property = "slimfast.outputFile", 31 | defaultValue = "${project.build.directory}/slimfast-local.json" 32 | ) 33 | private String outputFile; 34 | 35 | @Override 36 | public void execute() throws MojoExecutionException, MojoFailureException { 37 | if (skip) { 38 | getLog().info("Skipping plugin execution"); 39 | return; 40 | } 41 | 42 | LocalArtifactWrapper artifactWrapper = artifactHelper.getArtifactPaths(); 43 | Path outputFile = Paths.get(this.outputFile); 44 | FileHelper.ensureDirectoryExists(outputFile.getParent()); 45 | Path prefix = artifactWrapper.getPrefix(); 46 | 47 | Set s3Artifacts = new HashSet<>(); 48 | for (LocalArtifact artifact : artifactWrapper.getArtifacts()) { 49 | s3Artifacts.add(prepareArtifact(artifact)); 50 | } 51 | 52 | PreparedArtifactWrapper preparedArtifactWrapper = new PreparedArtifactWrapper( 53 | prefix, 54 | s3Artifacts 55 | ); 56 | try { 57 | JsonHelper.writeArtifactsToJson(outputFile.toFile(), preparedArtifactWrapper); 58 | } catch (IOException e) { 59 | throw new MojoFailureException("Failed writing manifest file to disk", e); 60 | } 61 | } 62 | 63 | private PreparedArtifact prepareArtifact(LocalArtifact artifact) { 64 | return new PreparedArtifact( 65 | artifact.getLocalPath().toString(), 66 | artifact.getTargetPath().toString() 67 | ); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/BaseFileUploader.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import java.io.IOException; 4 | import java.nio.file.Paths; 5 | import java.util.LinkedHashSet; 6 | import java.util.Set; 7 | import java.util.concurrent.ExecutionException; 8 | import java.util.concurrent.TimeoutException; 9 | import java.util.stream.Collectors; 10 | import org.apache.maven.plugin.MojoExecutionException; 11 | import 
org.apache.maven.plugin.MojoFailureException; 12 | 13 | public abstract class BaseFileUploader implements FileUploader { 14 | 15 | private UploadConfiguration config; 16 | 17 | @Override 18 | public void init(UploadConfiguration config) { 19 | this.config = config; 20 | } 21 | 22 | protected abstract void doUpload(Set artifacts) 23 | throws ExecutionException, InterruptedException, TimeoutException; 24 | 25 | @Override 26 | public Set upload(Set artifacts) 27 | throws MojoExecutionException, MojoFailureException { 28 | Set s3Artifacts = artifacts 29 | .stream() 30 | .map(artifact -> 31 | new S3Artifact( 32 | config.getS3Bucket(), 33 | getS3Key(artifact), 34 | artifact.getLocalPath(), 35 | config.getPrefix().resolve(artifact.getTargetPath()).toString(), 36 | FileHelper.md5(artifact.getLocalPath()), 37 | FileHelper.size(artifact.getLocalPath()) 38 | ) 39 | ) 40 | .collect(Collectors.toCollection(LinkedHashSet::new)); 41 | 42 | try { 43 | doUpload(s3Artifacts); 44 | } catch (Exception e) { 45 | throw new MojoExecutionException("Failed to upload artifacts", e); 46 | } 47 | 48 | try { 49 | JsonHelper.writeArtifactsToJson( 50 | config.getOutputFile().toFile(), 51 | new S3ArtifactWrapper(config.getPrefix().toString(), s3Artifacts) 52 | ); 53 | } catch (IOException e) { 54 | throw new MojoExecutionException("Error writing dependencies json to file", e); 55 | } 56 | 57 | return s3Artifacts; 58 | } 59 | 60 | private String getS3Key(LocalArtifact artifact) { 61 | final String s3Key; 62 | 63 | String file = artifact.getTargetPath().toString(); 64 | boolean isUnresolvedSnapshot = file.toUpperCase().endsWith("-SNAPSHOT.JAR"); 65 | 66 | if (isUnresolvedSnapshot) { 67 | if (config.isAllowUnresolvedSnapshots()) { 68 | String start = file.substring(0, file.length() - ".JAR".length()); 69 | String end = file.substring(file.length() - ".JAR".length()); 70 | String md5 = FileHelper.md5(artifact.getLocalPath()); 71 | s3Key = 72 | Paths 73 | .get(config.getS3ArtifactRoot()) 74 | 
package com.hubspot.slimfast.hadoop;

import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.hadoop.conf.Configuration;

/**
 * Immutable settings for shipping job JARs to HDFS: the local directory containing the
 * JARs, the HDFS root they are copied under, and the Hadoop {@link Configuration} used
 * to reach the filesystem. Build instances via {@link #newBuilder()}.
 */
public class SlimfastHadoopConfiguration {

  private final Path jarDirectory;
  private final Path hdfsArtifactRoot;
  private final Configuration configuration;

  private SlimfastHadoopConfiguration(
    Path jarDirectory,
    Path hdfsArtifactRoot,
    Configuration configuration
  ) {
    this.jarDirectory = jarDirectory;
    this.hdfsArtifactRoot = hdfsArtifactRoot;
    this.configuration = configuration;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  public Path getJarDirectory() {
    return jarDirectory;
  }

  public Path getHdfsArtifactRoot() {
    return hdfsArtifactRoot;
  }

  public Configuration getConfiguration() {
    return configuration;
  }

  public static class Builder {

    private Path jarDirectory;
    // default matches the plugin's conventional jar output location
    private Path hdfsArtifactRoot = Paths.get("jars");
    private Configuration configuration;

    /**
     * Derives the jar directory from the location of the JAR that contains the given
     * class. Fails if the class was not loaded from a JAR (e.g. an exploded classpath).
     */
    public Builder setJarByClass(Class<?> jarClass) {
      String resourceName = "/" + jarClass.getName().replace('.', '/') + ".class";
      URL classResource = jarClass.getResource(resourceName);
      if (classResource == null) {
        throw new IllegalStateException("Could not find resource " + jarClass);
      }

      String location = classResource.toString();
      if (!location.startsWith("jar:file:")) {
        throw new IllegalStateException(
          "Class doesn't appear to be in a JAR, are you running from a JAR?"
        );
      }

      // "jar:file:/path/to/app.jar!/com/Foo.class" -> "/path/to/app.jar"
      String jarPath = location.substring("jar:file:".length(), location.indexOf('!'));
      return setJarDirectory(Paths.get(jarPath).getParent());
    }

    public Builder setJarDirectory(Path jarDirectory) {
      this.jarDirectory = jarDirectory;
      return this;
    }

    public Builder setHdfsArtifactRoot(Path hdfsArtifactRoot) {
      this.hdfsArtifactRoot = hdfsArtifactRoot;
      return this;
    }

    public Builder setConfiguration(Configuration configuration) {
      this.configuration = configuration;
      return this;
    }

    /** @throws IllegalStateException if any required field was left unset */
    public SlimfastHadoopConfiguration build() {
      if (jarDirectory == null) {
        throw new IllegalStateException("jarDirectory must be set");
      }
      if (hdfsArtifactRoot == null) {
        throw new IllegalStateException("hdfsArtifactRoot must be set");
      }
      if (configuration == null) {
        throw new IllegalStateException("configuration must be set");
      }

      return new SlimfastHadoopConfiguration(
        jarDirectory,
        hdfsArtifactRoot,
        configuration
      );
    }
  }
}
| commons-collections 46 | commons-collections 47 | 48 | 49 | 50 | 51 | org.apache.maven 52 | maven-archiver 53 | 3.6.4 54 | 55 | 56 | com.google.guava 57 | guava 58 | 59 | 60 | 61 | 62 | org.codehaus.plexus 63 | plexus-interpolation 64 | 1.28 65 | 66 | 67 | org.codehaus.plexus 68 | plexus-utils 69 | 4.0.2 70 | 71 | 72 | software.amazon.awssdk.crt 73 | aws-crt 74 | 0.33.0 75 | 76 | 77 | software.amazon.awssdk 78 | bom 79 | 2.29.6 80 | pom 81 | import 82 | 83 | 84 | 85 | 86 | https://github.com/HubSpot/SlimFast 87 | 88 | 89 | 90 | The Apache License, Version 2.0 91 | http://www.apache.org/licenses/LICENSE-2.0.txt 92 | 93 | 94 | 95 | 96 | 97 | Jonathan Haber 98 | jhaber@hubspot.com 99 | 100 | 101 | 102 | 103 | scm:git:git@github.com:HubSpot/SlimFast.git 104 | scm:git:git@github.com:HubSpot/SlimFast.git 105 | git@github.com:HubSpot/SlimFast.git 106 | HEAD 107 | 108 | 109 | -------------------------------------------------------------------------------- /slimfast-hadoop/src/main/java/com/hubspot/slimfast/hadoop/HadoopHelper.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.slimfast.hadoop; 2 | 3 | import java.io.FileNotFoundException; 4 | import java.io.IOException; 5 | import java.io.InputStream; 6 | import java.net.URL; 7 | import java.util.Collections; 8 | import java.util.LinkedHashSet; 9 | import java.util.Set; 10 | import java.util.jar.Manifest; 11 | import org.apache.hadoop.conf.Configuration; 12 | import org.apache.hadoop.fs.FileStatus; 13 | import org.apache.hadoop.fs.FileSystem; 14 | import org.apache.hadoop.fs.Path; 15 | import org.slf4j.Logger; 16 | import org.slf4j.LoggerFactory; 17 | 18 | public class HadoopHelper { 19 | 20 | private static final Logger LOG = LoggerFactory.getLogger(HadoopHelper.class); 21 | 22 | public static void writeJarsToHdfsAndAddToClasspath( 23 | SlimfastHadoopConfiguration slimfastConfiguration 24 | ) { 25 | try { 26 | FileSystem hdfs = 
package com.hubspot.slimfast.hadoop;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.jar.Manifest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Uploads the job's classpath JARs to HDFS (skipping ones already present) and registers
 * each of them on the MapReduce job classpath and distributed cache.
 */
public class HadoopHelper {

  private static final Logger LOG = LoggerFactory.getLogger(HadoopHelper.class);

  /**
   * Copies every Class-Path JAR found via the manifest into HDFS under the configured
   * artifact root, then adds each one to the job configuration.
   *
   * @throws RuntimeException wrapping any {@link IOException} from HDFS
   */
  public static void writeJarsToHdfsAndAddToClasspath(
    SlimfastHadoopConfiguration slimfastConfiguration
  ) {
    java.nio.file.Path jarDirectory = slimfastConfiguration.getJarDirectory();
    try {
      FileSystem fileSystem = FileSystem.get(slimfastConfiguration.getConfiguration());

      for (String jarName : findClasspathJars(jarDirectory)) {
        Path hdfsPath = new Path(
          slimfastConfiguration.getHdfsArtifactRoot().resolve(jarName).toString()
        );
        if (exists(fileSystem, hdfsPath)) {
          LOG.info("Path already exists {}", hdfsPath);
        } else {
          Path localPath = new Path(jarDirectory.resolve(jarName).toString());
          fileSystem.copyFromLocalFile(localPath, hdfsPath);
          LOG.info("Successfully uploaded path {}", hdfsPath);
        }

        addJarToJobConfiguration(hdfsPath, slimfastConfiguration.getConfiguration());
      }
    } catch (IOException e) {
      throw new RuntimeException("Error writing JARs to HDFS", e);
    }
  }

  /** Appends the JAR to the job's classpath-files and cache-files properties. */
  private static void addJarToJobConfiguration(Path jarPath, Configuration configuration)
    throws IOException {
    appendProperty(configuration, "mapreduce.job.classpath.files", jarPath.toString());

    // the cache-files entry must be a fully qualified URI
    String qualifiedUri = FileSystem
      .get(configuration)
      .makeQualified(jarPath)
      .toUri()
      .toString();
    appendProperty(configuration, "mapreduce.job.cache.files", qualifiedUri);
  }

  /** Appends {@code value} to a comma-separated configuration property. */
  private static void appendProperty(Configuration configuration, String key, String value) {
    String existing = configuration.get(key);
    configuration.set(key, existing == null ? value : existing + "," + value);
  }

  /**
   * Returns true only if the path exists AND is non-empty; a zero-length file is
   * treated as absent so a previously interrupted upload gets retried.
   */
  private static boolean exists(FileSystem fileSystem, Path path) throws IOException {
    try {
      FileStatus status = fileSystem.getFileStatus(path);
      return status != null && status.getLen() > 0;
    } catch (FileNotFoundException e) {
      return false;
    }
  }

  /**
   * Collects the JAR names listed in the Class-Path attribute of every manifest visible
   * to the class loader, keeping only entries that actually exist in {@code jarDirectory}.
   */
  private static Set<String> findClasspathJars(java.nio.file.Path jarDirectory)
    throws IOException {
    Set<String> classpathJars = new LinkedHashSet<>();

    for (URL manifestUrl : Collections.list(
      getClassLoader().getResources("META-INF/MANIFEST.MF")
    )) {
      try (InputStream manifestStream = manifestUrl.openStream()) {
        String classPath = new Manifest(manifestStream)
          .getMainAttributes()
          .getValue("Class-Path");
        if (classPath == null) {
          continue;
        }
        for (String jarName : classPath.split(" ")) {
          if (jarDirectory.resolve(jarName).toFile().isFile()) {
            classpathJars.add(jarName);
          }
        }
      }
    }

    return classpathJars;
  }

  /** Prefers the thread context class loader, falling back to this class's loader. */
  private static ClassLoader getClassLoader() {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    return contextClassLoader == null
      ? HadoopHelper.class.getClassLoader()
      : contextClassLoader;
  }
}
"target/slimfast.json") 44 | private String inputFile; 45 | 46 | @Override 47 | public void execute() throws MojoExecutionException, MojoFailureException { 48 | S3ArtifactWrapper wrapper = readArtifactInfo(); 49 | 50 | final DownloadConfiguration configuration = buildConfiguration(wrapper.getPrefix()); 51 | FileHelper.ensureDirectoryExists(configuration.getCacheDirectory()); 52 | 53 | try (FileDownloader downloader = instantiateFileDownloader()) { 54 | downloader.init(configuration); 55 | downloader.download(wrapper.getArtifacts()); 56 | } catch (IOException e) { 57 | throw new MojoExecutionException(e); 58 | } 59 | } 60 | 61 | private S3ArtifactWrapper readArtifactInfo() throws MojoFailureException { 62 | try { 63 | return JsonHelper.readArtifactsFromJson(new File(inputFile)); 64 | } catch (IOException e) { 65 | throw new MojoFailureException("Error reading dependencies from file", e); 66 | } 67 | } 68 | 69 | private DownloadConfiguration buildConfiguration(String prefix) { 70 | S3Configuration s3Configuration = new S3Configuration( 71 | Optional.ofNullable(s3AccessKey), 72 | Optional.ofNullable(s3SecretKey), 73 | Optional.ofNullable(awsRegion).map(Region::of), 74 | Optional.of(20.0), // aws-sdk default is 10.0 75 | Optional.empty() // aws-sdk default is 8mb 76 | ); 77 | 78 | return new DownloadConfiguration( 79 | s3Configuration, 80 | Paths.get(prefix), 81 | Paths.get(cacheDirectory), 82 | Paths.get(outputDirectory) 83 | ); 84 | } 85 | 86 | private FileDownloader instantiateFileDownloader() throws MojoExecutionException { 87 | try { 88 | return (FileDownloader) Class 89 | .forName(fileDownloaderType) 90 | .getDeclaredConstructor() 91 | .newInstance(); 92 | } catch (ClassNotFoundException e) { 93 | throw new MojoExecutionException( 94 | "Unable to find file downloader implementation", 95 | e 96 | ); 97 | } catch ( 98 | InstantiationException 99 | | IllegalAccessException 100 | | NoSuchMethodException 101 | | InvocationTargetException e 102 | ) { 103 | throw new 
MojoExecutionException("Unable to instantiate file downloader", e); 104 | } catch (ClassCastException e) { 105 | throw new MojoExecutionException("Must implement FileDownloader interface", e); 106 | } 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /slimfast-plugin/pom.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4.0.0 4 | 5 | 6 | com.hubspot.slimfast 7 | slimfast-parent 8 | 1.0.5-SNAPSHOT 9 | 10 | 11 | com.hubspot.maven.plugins 12 | slimfast-plugin 13 | maven-plugin 14 | 15 | ${project.groupId}:${project.artifactId} 16 | 17 | 18 | 19 | com.google.code.findbugs 20 | annotations 21 | 22 | 23 | com.google.guava 24 | guava 25 | 26 | 27 | com.googlecode.json-simple 28 | json-simple 29 | 30 | 31 | org.apache.maven 32 | maven-archiver 33 | 34 | 35 | org.slf4j 36 | slf4j-api 37 | 38 | 39 | 40 | software.amazon.awssdk 41 | auth 42 | 43 | 44 | software.amazon.awssdk 45 | regions 46 | 47 | 48 | software.amazon.awssdk 49 | s3 50 | 51 | 52 | software.amazon.awssdk 53 | s3-transfer-manager 54 | 55 | 56 | org.apache.maven 57 | maven-artifact 58 | provided 59 | 60 | 61 | org.apache.maven 62 | maven-core 63 | provided 64 | 65 | 66 | org.apache.maven 67 | maven-model 68 | provided 69 | 70 | 71 | org.apache.maven 72 | maven-plugin-api 73 | provided 74 | 75 | 76 | org.apache.maven.plugin-tools 77 | maven-plugin-annotations 78 | provided 79 | 80 | 81 | org.codehaus.plexus 82 | plexus-interpolation 83 | provided 84 | 85 | 86 | org.codehaus.plexus 87 | plexus-utils 88 | provided 89 | 90 | 91 | org.codehaus.plexus 92 | plexus-xml 93 | provided 94 | 95 | 96 | joda-time 97 | joda-time 98 | runtime 99 | 100 | 101 | org.slf4j 102 | jcl-over-slf4j 103 | runtime 104 | 105 | 106 | software.amazon.awssdk.crt 107 | aws-crt 108 | runtime 109 | 110 | 111 | 112 | 113 | 114 | 115 | org.eclipse.sisu 116 | sisu-maven-plugin 117 | 118 | 119 | generate-index 120 | 121 | main-index 122 | 123 | 124 | 
125 | 126 | 127 | 128 | 129 | -------------------------------------------------------------------------------- /slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/DefaultFileUploader.java: -------------------------------------------------------------------------------- 1 | package com.hubspot.maven.plugins.slimfast; 2 | 3 | import com.google.common.base.Throwables; 4 | import java.util.LinkedHashSet; 5 | import java.util.Optional; 6 | import java.util.Set; 7 | import java.util.concurrent.CompletableFuture; 8 | import java.util.concurrent.ExecutionException; 9 | import java.util.concurrent.TimeUnit; 10 | import java.util.concurrent.TimeoutException; 11 | import java.util.stream.Collector; 12 | import java.util.stream.Collectors; 13 | import org.slf4j.Logger; 14 | import org.slf4j.LoggerFactory; 15 | import software.amazon.awssdk.services.s3.S3AsyncClient; 16 | import software.amazon.awssdk.services.s3.model.HeadObjectRequest; 17 | import software.amazon.awssdk.services.s3.model.NoSuchKeyException; 18 | import software.amazon.awssdk.transfer.s3.S3TransferManager; 19 | import software.amazon.awssdk.transfer.s3.model.UploadFileRequest; 20 | 21 | public class DefaultFileUploader extends BaseFileUploader { 22 | 23 | private static final Logger LOG = LoggerFactory.getLogger(DefaultFileUploader.class); 24 | 25 | private S3AsyncClient s3AsyncClient; 26 | private S3TransferManager s3TransferManager; 27 | 28 | @Override 29 | public void init(UploadConfiguration config) { 30 | super.init(config); 31 | this.s3AsyncClient = S3Factory.createS3AsyncClient(config.getS3Configuration()); 32 | this.s3TransferManager = S3Factory.createTransferManager(s3AsyncClient); 33 | } 34 | 35 | @Override 36 | protected void doUpload(Set artifacts) 37 | throws ExecutionException, InterruptedException, TimeoutException { 38 | Optional bucket = artifacts.stream().findFirst().map(S3Artifact::getBucket); 39 | if (bucket.isEmpty()) { 40 | return; 41 | } 42 | 43 | Set existingKeys = 
package com.hubspot.maven.plugins.slimfast;

import com.google.common.base.Throwables;
import java.util.LinkedHashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
import software.amazon.awssdk.services.s3.model.NoSuchKeyException;
import software.amazon.awssdk.transfer.s3.S3TransferManager;
import software.amazon.awssdk.transfer.s3.model.UploadFileRequest;

/**
 * {@link BaseFileUploader} implementation backed by the AWS v2 transfer manager.
 * Keys that already exist in the bucket (checked via HeadObject) are skipped.
 */
public class DefaultFileUploader extends BaseFileUploader {

  private static final Logger LOG = LoggerFactory.getLogger(DefaultFileUploader.class);

  private S3AsyncClient s3AsyncClient;
  private S3TransferManager s3TransferManager;

  @Override
  public void init(UploadConfiguration config) {
    super.init(config);
    this.s3AsyncClient = S3Factory.createS3AsyncClient(config.getS3Configuration());
    this.s3TransferManager = S3Factory.createTransferManager(s3AsyncClient);
  }

  @Override
  protected void doUpload(Set<S3Artifact> artifacts)
    throws ExecutionException, InterruptedException, TimeoutException {
    // all artifacts share one bucket; an empty set means nothing to do
    Optional<String> bucket = artifacts.stream().findFirst().map(S3Artifact::getBucket);
    if (bucket.isEmpty()) {
      return;
    }

    Set<String> existingKeys = checkForExistingKeys(
      bucket.get(),
      artifacts.stream().map(S3Artifact::getKey).collect(Collectors.toSet())
    );

    artifacts
      .stream()
      .filter(artifact -> {
        boolean alreadyUploaded = existingKeys.contains(artifact.getKey());
        if (alreadyUploaded) {
          LOG.info("Key already exists {}", artifact.getKey());
        }
        return !alreadyUploaded;
      })
      .map(this::startUpload)
      .collect(futuresToSet())
      .get(5, TimeUnit.MINUTES);
  }

  /** Kicks off one async upload; the future fails if the artifact has no local path. */
  private CompletableFuture<Object> startUpload(S3Artifact artifact) {
    UploadFileRequest request = UploadFileRequest
      .builder()
      .putObjectRequest(b -> b.bucket(artifact.getBucket()).key(artifact.getKey()))
      .source(
        artifact
          .getLocalPath()
          .orElseThrow(() ->
            new IllegalArgumentException(
              "Artifact " + artifact.getKey() + " is missing localPath"
            )
          )
      )
      .build();

    return s3TransferManager
      .uploadFile(request)
      .completionFuture()
      .handle((result, ex) -> {
        if (ex != null) {
          throw new RuntimeException(
            "Error uploading file " + artifact.getLocalPath().get(),
            Throwables.getRootCause(ex)
          );
        }
        LOG.info("Successfully uploaded key {}", artifact.getKey());
        return (Object) result;
      });
  }

  @Override
  public void close() {
    s3AsyncClient.close();
  }

  /** Returns the subset of {@code keys} that already exist in {@code bucket}. */
  private Set<String> checkForExistingKeys(String bucket, Set<String> keys)
    throws ExecutionException, InterruptedException, TimeoutException {
    return keys
      .stream()
      .map(key -> headForKey(bucket, key))
      .collect(futuresToSet())
      .get(5, TimeUnit.MINUTES)
      .stream()
      .flatMap(Optional::stream)
      .collect(Collectors.toCollection(LinkedHashSet::new));
  }

  /** Completes with the key if it exists, empty if S3 reports NoSuchKey. */
  private CompletableFuture<Optional<String>> headForKey(String bucket, String key) {
    return s3AsyncClient
      .headObject(HeadObjectRequest.builder().bucket(bucket).key(key).build())
      .handle((response, ex) -> {
        if (ex == null) {
          return Optional.of(key);
        }
        if (Throwables.getRootCause(ex) instanceof NoSuchKeyException) {
          return Optional.empty();
        }
        throw new RuntimeException("Error getting object metadata for key: " + key, ex);
      });
  }

  /**
   * Collects a stream of futures into one future that completes with the set of all
   * results (insertion-ordered), failing if any constituent future fails.
   */
  public static <T> Collector<CompletableFuture<T>, ?, CompletableFuture<Set<T>>> futuresToSet() {
    return Collectors.collectingAndThen(
      Collectors.toCollection(LinkedHashSet::new),
      futures ->
        CompletableFuture
          .allOf(futures.toArray(new CompletableFuture[0]))
          .thenApply(ignored ->
            futures
              .stream()
              .map(CompletableFuture::join)
              .collect(Collectors.toCollection(LinkedHashSet::new))
          )
    );
  }
}
package com.hubspot.maven.plugins.slimfast;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Parameter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.regions.Region;

/**
 * Shared skeleton for the upload goals: resolves the artifacts to upload (via
 * {@link #getArtifactWrapper()}), builds the upload configuration, and runs the
 * configured {@link FileUploader}. Supports a dry-run mode that swaps in
 * {@link DryRunFileUploader}.
 */
public abstract class BaseUploadMojo extends AbstractMojo {

  private static final Logger LOG = LoggerFactory.getLogger(BaseUploadMojo.class);

  private static final String DEFAULT_UPLOADER =
    "com.hubspot.maven.plugins.slimfast.DefaultFileUploader";
  private static final String DRY_RUN_UPLOADER =
    "com.hubspot.maven.plugins.slimfast.DryRunFileUploader";

  @Parameter(
    property = "slimfast.fileUploader",
    alias = "fileUploader",
    defaultValue = DEFAULT_UPLOADER
  )
  private String fileUploaderType;

  @Parameter(property = "slimfast.s3.accessKey", defaultValue = "${s3.access.key}")
  private String s3AccessKey;

  @Parameter(property = "slimfast.s3.secretKey", defaultValue = "${s3.secret.key}")
  private String s3SecretKey;

  @Parameter(property = "slimfast.aws.region", defaultValue = "${aws.region}")
  private String awsRegion;

  @Parameter(property = "slimfast.dryRun", defaultValue = "false")
  private boolean dryRun;

  @Parameter(
    property = "slimfast.s3.bucket",
    defaultValue = "${s3.bucket}",
    required = true
  )
  private String s3Bucket;

  @Parameter(
    property = "slimfast.s3.artifactPrefix",
    defaultValue = "${s3.artifact.root}",
    required = true
  )
  private String s3ArtifactRoot;

  @Parameter(property = "slimfast.plugin.skip", defaultValue = "false")
  private boolean skip;

  @Parameter(
    property = "slimfast.outputFile",
    defaultValue = "${project.build.directory}/slimfast.json"
  )
  private String outputFile;

  @Parameter(property = "slimfast.allowUnresolvedSnapshots", defaultValue = "false")
  private boolean allowUnresolvedSnapshots;

  @Override
  public void execute() throws MojoExecutionException, MojoFailureException {
    if (skip) {
      LOG.info("Skipping plugin execution");
      return;
    }

    ArtifactWrapper artifactWrapper = getArtifactWrapper();
    UploadConfiguration configuration = buildConfiguration(artifactWrapper.getPrefix());

    FileHelper.ensureDirectoryExists(configuration.getOutputFile().getParent());

    try (FileUploader fileUploader = instantiateFileUploader()) {
      fileUploader.init(configuration);
      fileUploader.upload(artifactWrapper.getLocalArtifacts());
    } catch (IOException e) {
      throw new MojoExecutionException(e);
    }
  }

  /** Subclasses decide which artifacts (and path prefix) get uploaded. */
  protected abstract ArtifactWrapper getArtifactWrapper() throws MojoExecutionException;

  protected UploadConfiguration buildConfiguration(Path prefix) {
    S3Configuration s3Configuration = new S3Configuration(
      Optional.ofNullable(s3AccessKey),
      Optional.ofNullable(s3SecretKey),
      Optional.ofNullable(awsRegion).map(Region::of),
      Optional.of(20.0), // aws-sdk default is 10.0
      Optional.empty() // aws-sdk default is 8mb
    );

    return new UploadConfiguration(
      s3Configuration,
      prefix,
      s3Bucket,
      s3ArtifactRoot,
      Paths.get(outputFile),
      allowUnresolvedSnapshots
    );
  }

  /**
   * Reflectively instantiates the uploader. In dry-run mode only the default uploader
   * may be configured; it is silently replaced by the dry-run implementation.
   */
  private FileUploader instantiateFileUploader() throws MojoExecutionException {
    String resolvedFileUploaderType = fileUploaderType;
    if (dryRun) {
      if (!DEFAULT_UPLOADER.equals(fileUploaderType)) {
        throw new MojoExecutionException(
          "May not specify custom fileUploader when using the dryRun flag"
        );
      }
      resolvedFileUploaderType = DRY_RUN_UPLOADER;
    }

    try {
      return (FileUploader) Class
        .forName(resolvedFileUploaderType)
        .getDeclaredConstructor()
        .newInstance();
    } catch (ClassNotFoundException e) {
      throw new MojoExecutionException("Unable to find file uploader implementation", e);
    } catch (
      InstantiationException
      | IllegalAccessException
      | NoSuchMethodException
      | InvocationTargetException e
    ) {
      throw new MojoExecutionException("Unable to instantiate file uploader", e);
    } catch (ClassCastException e) {
      throw new MojoExecutionException("Must implement FileUploader interface", e);
    }
  }
}
package com.hubspot.maven.plugins.slimfast;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.LinkedHashSet;
import java.util.Set;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Serializes artifact manifests to/from the slimfast JSON format:
 * a top-level object with a "prefix" string and an "artifacts" array.
 */
public class JsonHelper {

  private static final Logger LOG = LoggerFactory.getLogger(JsonHelper.class);

  /** Writes an S3 manifest ({@code s3Bucket}/{@code s3ObjectKey}/... entries). */
  public static void writeArtifactsToJson(File outputFile, S3ArtifactWrapper wrapper)
    throws IOException {
    JSONObject json = new JSONObject();

    JSONArray artifacts = new JSONArray();
    for (S3Artifact artifact : wrapper.getArtifacts()) {
      artifacts.add(toJsonObject(artifact));
    }

    json.put("prefix", wrapper.getPrefix());
    json.put("artifacts", artifacts);

    writeJson(outputFile, json);
  }

  /** Writes a local (pre-upload) manifest ({@code localPath}/{@code targetPath} entries). */
  public static void writeArtifactsToJson(
    File outputFile,
    PreparedArtifactWrapper wrapper
  ) throws IOException {
    JSONObject json = new JSONObject();

    JSONArray artifacts = new JSONArray();
    for (PreparedArtifact artifact : wrapper.getArtifacts()) {
      artifacts.add(toJsonObject(artifact));
    }

    json.put("prefix", wrapper.getPrefix().toString());
    json.put("artifacts", artifacts);

    writeJson(outputFile, json);

    if (LOG.isDebugEnabled()) {
      StringWriter debugWriter = new StringWriter();
      json.writeJSONString(debugWriter);

      LOG.debug("Wrote artifacts json: {}", debugWriter.toString());
    }
  }

  /** @throws IOException on I/O failure or malformed JSON (parse errors are wrapped) */
  public static S3ArtifactWrapper readArtifactsFromJson(File inputFile)
    throws IOException {
    JSONParser parser = new JSONParser();

    try (Reader reader = newReader(inputFile)) {
      try {
        JSONObject parsed = (JSONObject) parser.parse(reader);

        String prefix = (String) parsed.get("prefix");
        Set<S3Artifact> artifacts = new LinkedHashSet<>();
        for (Object object : (JSONArray) parsed.get("artifacts")) {
          artifacts.add(fromJsonObject((JSONObject) object));
        }

        return new S3ArtifactWrapper(prefix, artifacts);
      } catch (ParseException e) {
        throw new IOException(e);
      }
    }
  }

  /** @throws IOException on I/O failure or malformed JSON (parse errors are wrapped) */
  public static PreparedArtifactWrapper readPreparedArtifactsFromJson(File inputFile)
    throws IOException {
    JSONParser parser = new JSONParser();

    try (Reader reader = newReader(inputFile)) {
      try {
        JSONObject parsed = (JSONObject) parser.parse(reader);

        String prefix = (String) parsed.get("prefix");
        Set<PreparedArtifact> artifacts = new LinkedHashSet<>();
        for (Object object : (JSONArray) parsed.get("artifacts")) {
          artifacts.add(preparedArtifactFromJsonObject((JSONObject) object));
        }

        return new PreparedArtifactWrapper(Path.of(prefix), artifacts);
      } catch (ParseException e) {
        throw new IOException(e);
      }
    }
  }

  // Shared serialization path for both manifest flavors
  private static void writeJson(File outputFile, JSONObject json) throws IOException {
    try (Writer writer = newWriter(outputFile)) {
      json.writeJSONString(writer);
      writer.flush();
    }
  }

  private static Writer newWriter(File file) throws IOException {
    FileOutputStream outputStream = new FileOutputStream(file);
    return new BufferedWriter(
      new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)
    );
  }

  private static Reader newReader(File file) throws IOException {
    // (was misleadingly named "outputStream" — this is the input side)
    FileInputStream inputStream = new FileInputStream(file);
    return new BufferedReader(
      new InputStreamReader(inputStream, StandardCharsets.UTF_8)
    );
  }

  private static JSONObject toJsonObject(S3Artifact artifact) {
    JSONObject json = new JSONObject();
    json.put("s3Bucket", artifact.getBucket());
    json.put("s3ObjectKey", artifact.getKey());
    json.put("targetPath", artifact.getTargetPath());
    json.put("md5", artifact.getMd5());
    json.put("filesize", artifact.getSize());

    return json;
  }

  private static S3Artifact fromJsonObject(JSONObject json) {
    String bucket = (String) json.get("s3Bucket");
    String key = (String) json.get("s3ObjectKey");
    String targetPath = (String) json.get("targetPath");
    String md5 = (String) json.get("md5");
    long size = ((Number) json.get("filesize")).longValue();

    return new S3Artifact(bucket, key, targetPath, md5, size);
  }

  private static JSONObject toJsonObject(PreparedArtifact preparedArtifact) {
    JSONObject jsonObject = new JSONObject();
    jsonObject.put("localPath", preparedArtifact.getLocalPath());
    jsonObject.put("targetPath", preparedArtifact.getTargetPath());

    return jsonObject;
  }

  private static PreparedArtifact preparedArtifactFromJsonObject(JSONObject jsonObject) {
    String localPath = (String) jsonObject.get("localPath");
    String targetPath = (String) jsonObject.get("targetPath");

    return new PreparedArtifact(localPath, targetPath);
  }
}
import java.util.Set;
9 | import java.util.concurrent.CompletableFuture;
10 | import java.util.concurrent.TimeUnit;
11 | import org.apache.maven.plugin.MojoExecutionException;
12 | import org.apache.maven.plugin.MojoFailureException;
13 | import org.slf4j.Logger;
14 | import org.slf4j.LoggerFactory;
15 | import software.amazon.awssdk.services.s3.S3AsyncClient;
16 | import software.amazon.awssdk.transfer.s3.S3TransferManager;
17 | import software.amazon.awssdk.transfer.s3.model.DownloadFileRequest;
18 |
/* FileDownloader implementation backed by the AWS SDK v2 S3 Transfer Manager. Artifacts are first fetched into a local cache directory (keyed by their target path relative to the configured prefix) and then copied from the cache to their final target paths, with size/md5 verification. init(...) must be called before download(...); close() releases the S3 client. */
19 | public class DefaultFileDownloader implements FileDownloader {
20 |
21 | private static final Logger LOG = LoggerFactory.getLogger(DefaultFileDownloader.class);
22 |
23 | private S3AsyncClient s3AsyncClient;
24 | private S3TransferManager s3TransferManager;
25 |
26 | private DownloadConfiguration config;
27 |
/* Captures the configuration and builds the async S3 client plus the transfer manager that wraps it. */
28 | @Override
29 | public void init(DownloadConfiguration config) {
30 | this.config = config;
31 |
32 | this.s3AsyncClient = S3Factory.createS3AsyncClient(config.getS3Configuration());
33 | this.s3TransferManager = S3Factory.createTransferManager(s3AsyncClient);
34 | }
35 |
/* Fetches every artifact into the cache concurrently (one 5-minute ceiling for the whole batch), then copies each cached file to its target path, skipping targets that already exist, and verifies size/md5 after each copy. NOTE(review): the catch (Exception e) below also wraps InterruptedException without restoring the interrupt flag -- consider Thread.currentThread().interrupt() there. */
36 | @Override
37 | public void download(Set artifacts)
38 | throws MojoExecutionException, MojoFailureException {
39 | try {
40 | artifacts
41 | .stream()
42 | .map(this::fetchCachedFile)
43 | .collect(DefaultFileUploader.futuresToSet())
44 | .get(5, TimeUnit.MINUTES)
45 | .stream()
46 | .filter(cachedFile -> {
/* An existing target file is trusted as-is and not re-verified. */
47 | if (Files.exists(cachedFile.targetFile)) {
48 | LOG.info("Target file exists {}", cachedFile.targetFile);
49 | return false;
50 | }
51 | return true;
52 | })
53 | .forEach(cachedFile -> {
54 | try {
55 | FileHelper.ensureDirectoryExists(cachedFile.targetFile.getParent());
56 | Files.copy(cachedFile.cacheFile, cachedFile.targetFile);
57 | verifyChecksums(cachedFile.targetFile, cachedFile.artifact);
58 | } catch (IOException e) {
59 | throw new UncheckedIOException(
60 | String.format(
61 | "Error copying file from %s to %s",
62 | cachedFile.cacheFile,
63 | cachedFile.targetFile
64 | ),
65 | e
66 | );
67 | }
68 | });
69 | } catch (Exception e) {
70 | throw new MojoExecutionException(e);
71 | }
72 | }
73 |
/* Associates an artifact with where it lives in the cache and where it must end up. */
74 | private record CachedFile(S3Artifact artifact, Path cacheFile, Path targetFile) {}
75 |
/* Returns a future for the artifact's cached copy: already completed when the cache holds a checksum-matching file, otherwise an async S3 download into a temp file that is atomically moved into the cache on completion. NOTE(review): the temp file is created in the cache root before ensureDirectoryExists runs for the entry's parent, so the cache directory itself is assumed to already exist -- confirm it is created by the caller/config. */
76 | private CompletableFuture fetchCachedFile(S3Artifact artifact) {
77 | Path cacheFile = config
78 | .getCacheDirectory()
79 | .resolve(config.getPrefix().relativize(Paths.get(artifact.getTargetPath())));
80 | Path targetFile = config.getOutputDirectory().resolve(artifact.getTargetPath());
81 |
82 | CachedFile result = new CachedFile(artifact, cacheFile, targetFile);
83 |
84 | if (artifactIsCached(cacheFile, artifact)) {
85 | LOG.info("Target file is cached {}", cacheFile);
86 | return CompletableFuture.completedFuture(result);
87 | } else {
88 | Path tempPath = createTempFile(cacheFile);
89 | FileHelper.ensureDirectoryExists(cacheFile.getParent());
90 |
91 | return s3TransferManager
92 | .downloadFile(
93 | DownloadFileRequest
94 | .builder()
95 | .getObjectRequest(req ->
96 | req.bucket(artifact.getBucket()).key(artifact.getKey())
97 | )
98 | .destination(tempPath)
99 | .build()
100 | )
101 | .completionFuture()
102 | .thenApply(ignored -> {
103 | FileHelper.atomicMove(tempPath, cacheFile);
104 | LOG.info("Successfully downloaded key {}", artifact.getKey());
105 | return result;
106 | });
107 | }
108 | }
109 |
/* NOTE(review): only the S3AsyncClient is closed; the S3TransferManager built on top of it is not closed explicitly -- presumably closing the shared client suffices; confirm against the SDK docs. */
110 | @Override
111 | public void close() throws IOException {
112 | s3AsyncClient.close();
113 | }
114 |
/* A cache entry counts as valid only when the file exists AND both its size and md5 match the manifest. */
115 | private boolean artifactIsCached(Path path, S3Artifact artifact) {
116 | return Files.exists(path) && checksumsMatch(path, artifact);
117 | }
118 |
/* Creates a uniquely named temp file in the cache directory, prefixed with the final file name, to hold the in-flight download. */
119 | private Path createTempFile(Path path) {
120 | try {
121 | return Files.createTempFile(
122 | config.getCacheDirectory(),
123 | String.valueOf(path.getFileName()),
124 | null
125 | );
126 | } catch (IOException e) {
127 | throw new UncheckedIOException(
128 | "Error creating temp file in " + config.getCacheDirectory(),
129 | e
130 | );
131 | }
132 | }
133 |
/* Fails fast (IllegalStateException) when the copied file's size or md5 differs from what the manifest recorded for the artifact. */
134 | private static void verifyChecksums(Path path, S3Artifact artifact) {
135 | long actualSize = FileHelper.size(path);
136 | long expectedSize = artifact.getSize();
137 | if (actualSize != expectedSize) {
138 | throw new IllegalStateException(
139 | String.format(
140 | "File %s has unexpected size, expected=%s, actual=%s",
141 | path,
142 | expectedSize,
143 | actualSize
144 | )
145 | );
146 | }
147 |
148 | String actualMd5 = FileHelper.md5(path);
149 | String expectedMd5 = artifact.getMd5();
150 | if (!actualMd5.equals(expectedMd5)) {
151 | throw new IllegalStateException(
152 | String.format(
153 | "File %s has unexpected checksum, expected=%s, actual=%s",
154 | path,
155 | expectedMd5,
156 | actualMd5
157 | )
158 | );
159 | }
160 | }
161 |
/* Non-throwing size+md5 comparison used for cache-hit detection. */
162 | private static boolean checksumsMatch(Path path, S3Artifact artifact) {
163 | return (
164 | FileHelper.size(path) == artifact.getSize() &&
165 | FileHelper.md5(path).equals(artifact.getMd5())
166 | );
167 | }
168 | }
169 | --------------------------------------------------------------------------------
/slimfast-plugin/README.md:
--------------------------------------------------------------------------------
1 | # slimfast-plugin
2 |
3 | ## Overview ##
4 |
5 | The slimfast-plugin can be used in place of the maven-assembly-plugin or maven-shade-plugin (which are often used to build
6 | fat jars). In addition, if you configure the maven-jar-plugin in the right way, the resulting jar (although not a fat jar)
7 | will still be runnable using plain old `java -jar` (ie, without needing to manually construct the classpath).
8 |
9 | This uses a feature of the JVM which is that if you run a jar which has a `Class-Path` entry in its manifest, then those
10 | paths are added to the classpath of the JVM. Using this feature, we can tell the maven-jar-plugin to build the classpath
11 | for us at build time and add it as a manifest property.
Then we can configure the slimfast-plugin to copy the dependency 12 | jars to the right place and the resulting jar will start up fine when run with `java -jar`. 13 | 14 | ## Usage ## 15 | 16 | The plugin has three goals: `copy`, `upload`, and `download`. 17 | 18 | `copy` can be used to copy your dependencies to the target folder so they're available at runtime ([example](#copy-goal)). 19 | This is similar to the `copy-dependencies` goal of the `maven-dependency-plugin`, but we were unable to get that to work 20 | with a repository layout combined with resolved snapshot versions (the `useBaseVersion` flag seems to get ignored when the 21 | `useRepositoryLayout` flag is set). Using the `copy` goal saves you the time of building a fat jar and eliminates the jar 22 | merging complexities, but it doesn't reduce the size of your build artifacts. 23 | 24 | Just using the `copy` goal has a lot of advantages and is a big win in its own right, but there's still room for improvement. 25 | At HubSpot, for example, we tar up the build directory and upload it to S3 at the end of the build. Then we download and 26 | untar it on the application servers when someone wants to deploy. Using the `copy` goal doesn't reduce the size of these 27 | tarballs so we're still uploading the same amount to S3 on build and downloading the same amount on deploy. This adds 28 | time to builds and deploys, uses lots of bandwidth, and costs money for storing these large artifacts in S3. 29 | 30 | But fear not! This is what the `upload` and `download` goals are for. The `upload` goal binds to the deploy phase by default 31 | and will upload all of the project's dependencies to S3 ([example](#upload-goal)). It only uploads a dependency if it doesn't 32 | already exist in S3, so after the initial build this step should mostly be a no-op and go very fast. 
When it's done uploading 33 | the files, it will write out a JSON file (`target/slimfast.json` by default) containing information that can be used later to 34 | download the dependencies to the correct paths. 35 | 36 | The most straightforward way to use this JSON file is to run the `download` goal on your application servers before startup. 37 | This goal doesn't require a project so it can run in standalone mode without a `pom.xml`. A minimal invocation would look 38 | like [this](#download-goal). It will download all of the project dependencies (determined by reading `target/slimfast.json`) 39 | to the correct paths so that the application will start up with `java -jar`. 40 | 41 | Another option is to integrate this into your deployment phase, which is what we've done at HubSpot. Before using SlimFast, 42 | at build time we would generate a single S3 artifact and store its information in the database so that we can fetch it at deploy 43 | time. Now, we just store an array of S3 artifacts produced by the build (the main artifact, combined with the SlimFast artifacts 44 | read from `target/slimfast.json`). At deploy time, [Singularity](https://github.com/HubSpot/Singularity) downloads all these S3 45 | artifacts for us so everything just works. 
46 | 47 | ## Examples ## 48 | 49 | ### Copy Goal ### 50 | 51 | ```xml 52 | 53 | 54 | 55 | org.apache.maven.plugins 56 | maven-jar-plugin 57 | 58 | 59 | 60 | true 61 | ${your-main-class-property} 62 | lib/ 63 | repository 64 | 65 | 66 | 67 | 68 | 69 | com.hubspot.maven.plugins 70 | slimfast-plugin 71 | 1.0.0 72 | 73 | 74 | 75 | copy 76 | 77 | package 78 | 79 | 80 | 81 | 82 | 83 | ``` 84 | 85 | ### Upload Goal ### 86 | 87 | ```xml 88 | 89 | 90 | 91 | org.apache.maven.plugins 92 | maven-jar-plugin 93 | 94 | 95 | 96 | true 97 | ${your-main-class-property} 98 | lib/ 99 | repository 100 | 101 | 102 | 103 | 104 | 105 | com.hubspot.maven.plugins 106 | slimfast-plugin 107 | 1.0.0 108 | 109 | 110 | 111 | upload 112 | 113 | deploy 114 | 115 | my-bucket 116 | jars 117 | abc 118 | 123 119 | us-east-1 120 | 121 | 122 | 123 | 124 | 125 | 126 | ``` 127 | 128 | You probably don't want to hard-code these S3 credentials in your pom though, instead you can use the 129 | `properties-maven-plugin` to read them from a file that is managed by puppet or your configuration management 130 | tool of choice. 
If you have a file located at `/etc/slimfast.properties` with contents like:
131 |
132 | ```properties
133 | s3.bucket=my-bucket
134 | s3.artifact.root=jars
135 | s3.access.key=abc
136 | s3.secret.key=123
137 | ```
138 |
139 | Then you could invoke SlimFast like this:
140 |
141 | ```xml
142 |
143 |
144 |
145 | org.apache.maven.plugins
146 | maven-jar-plugin
147 |
148 |
149 |
150 | true
151 | ${your-main-class-property}
152 | lib/
153 | repository
154 |
155 |
156 |
157 |
158 |
159 | org.codehaus.mojo
160 | properties-maven-plugin
161 | 1.0.0
162 |
163 |
164 |
165 | read-project-properties
166 |
167 | initialize
168 |
169 |
170 | /etc/slimfast.properties
171 |
172 |
173 |
174 |
175 |
176 |
177 | com.hubspot.maven.plugins
178 | slimfast-plugin
179 | 1.0.0
180 |
181 |
182 |
183 | upload
184 |
185 | deploy
186 |
187 |
188 |
189 |
190 |
191 | ```
192 |
193 | ### Download Goal ###
194 |
195 | ```bash
196 | mvn com.hubspot.maven.plugins:slimfast-plugin:1.0.0:download -Dslimfast.s3.accessKey=abc -Dslimfast.s3.secretKey=123
197 | ```
198 | --------------------------------------------------------------------------------
/slimfast-plugin/src/main/java/com/hubspot/maven/plugins/slimfast/ArtifactHelper.java:
--------------------------------------------------------------------------------
1 | package com.hubspot.maven.plugins.slimfast;
2 |
3 | import static org.apache.maven.archiver.MavenArchiver.REPOSITORY_LAYOUT;
4 | import static org.apache.maven.archiver.MavenArchiver.REPOSITORY_LAYOUT_NONUNIQUE;
5 | import static org.apache.maven.archiver.MavenArchiver.SIMPLE_LAYOUT;
6 | import static org.apache.maven.archiver.MavenArchiver.SIMPLE_LAYOUT_NONUNIQUE;
7 |
8 | import java.io.File;
9 | import java.nio.file.Path;
10 | import java.nio.file.Paths;
11 | import java.util.ArrayList;
12 | import java.util.Collections;
13 | import java.util.ConcurrentModificationException;
14 | import java.util.HashSet;
15 | import java.util.LinkedHashSet;
16 | import java.util.List;
17 | import
java.util.Properties;
18 | import java.util.Set;
19 | import javax.inject.Inject;
20 | import javax.inject.Named;
21 | import javax.inject.Singleton;
22 | import org.apache.maven.archiver.ManifestConfiguration;
23 | import org.apache.maven.archiver.MavenArchiveConfiguration;
24 | import org.apache.maven.artifact.Artifact;
25 | import org.apache.maven.configuration.BeanConfigurationException;
26 | import org.apache.maven.configuration.BeanConfigurationRequest;
27 | import org.apache.maven.configuration.BeanConfigurator;
28 | import org.apache.maven.configuration.DefaultBeanConfigurationRequest;
29 | import org.apache.maven.plugin.MojoExecutionException;
30 | import org.apache.maven.project.MavenProject;
31 | import org.codehaus.plexus.interpolation.InterpolationException;
32 | import org.codehaus.plexus.interpolation.Interpolator;
33 | import org.codehaus.plexus.interpolation.PrefixAwareRecursionInterceptor;
34 | import org.codehaus.plexus.interpolation.PrefixedObjectValueSource;
35 | import org.codehaus.plexus.interpolation.PrefixedPropertiesValueSource;
36 | import org.codehaus.plexus.interpolation.RecursionInterceptor;
37 | import org.codehaus.plexus.interpolation.StringSearchInterpolator;
38 | import org.codehaus.plexus.interpolation.ValueSource;
39 |
/* Computes, for each runtime classpath entry of the current project, the target path that the maven-jar-plugin's Class-Path manifest layout will reference, so copied/uploaded jars land exactly where the manifest expects them at runtime. */
40 | @Named
41 | @Singleton
42 | public class ArtifactHelper {
43 |
/* Interpolation prefix ("artifact.") recognized in classpath layout expressions. */
44 | private static final List ARTIFACT_EXPRESSION_PREFIXES =
45 | Collections.singletonList("artifact.");
46 |
47 | private static final Object ARTIFACTS_LOCK = new Object();
48 |
49 | private final BeanConfigurator beanConfigurator;
50 | private final MavenProject project;
51 |
52 | @Inject
53 | public ArtifactHelper(BeanConfigurator beanConfigurator, MavenProject project) {
54 | this.beanConfigurator = beanConfigurator;
55 | this.project = project;
56 | }
57 |
/* Maps each runtime classpath file to a LocalArtifact (local path + manifest-layout target path). Throws when maven-jar-plugin is not configured with addClasspath. Entries with no matching project Artifact, or when no classpath layout type is configured, fall back to the bare file name. */
58 | public LocalArtifactWrapper getArtifactPaths() throws MojoExecutionException {
59 | ManifestConfiguration manifestConfiguration = parseManifestConfiguration(
60 | beanConfigurator,
61 | project
62 | );
63 |
/* Fail early with a clear message when no Class-Path manifest entry will be generated at all. */
64 | if (!manifestConfiguration.isAddClasspath()) {
65 | throw new MojoExecutionException(
66 | "maven-jar-plugin is not configured to add classpath"
67 | );
68 | }
69 |
70 | Set artifacts = new HashSet<>();
71 | for (String classpathElement : classpathElements()) {
72 | File classpathFile = new File(classpathElement);
/* Skip directories (e.g. the build output directory); only files become artifacts. */
73 | if (classpathFile.getAbsoluteFile().isFile()) {
74 | Artifact artifact = findArtifactWithFile(project.getArtifacts(), classpathFile);
75 |
76 | Path localPath = classpathFile.toPath();
77 | final Path targetPath;
78 | if (artifact == null || manifestConfiguration.getClasspathLayoutType() == null) {
79 | targetPath = localPath.getFileName();
80 | } else {
81 | targetPath = computePath(artifact, manifestConfiguration);
82 | }
83 |
84 | artifacts.add(new LocalArtifact(localPath, targetPath));
85 | }
86 | }
87 |
88 | Path prefix = Paths.get("target").resolve(manifestConfiguration.getClasspathPrefix());
89 | return new LocalArtifactWrapper(prefix, artifacts);
90 | }
91 |
/* Binds the maven-jar-plugin "default-jar" execution's archive section from the POM model onto a MavenArchiveConfiguration bean and returns its manifest configuration. */
92 | private static ManifestConfiguration parseManifestConfiguration(
93 | BeanConfigurator beanConfigurator,
94 | MavenProject project
95 | ) throws MojoExecutionException {
96 | MavenArchiveConfiguration archiveConfiguration = new MavenArchiveConfiguration();
97 |
98 | BeanConfigurationRequest beanConfiguration = new DefaultBeanConfigurationRequest()
99 | .setBean(archiveConfiguration)
100 | .setConfiguration(
101 | project.getModel(),
102 | "org.apache.maven.plugins",
103 | "maven-jar-plugin",
104 | "default-jar"
105 | );
106 |
/* Narrow the bound configuration element to the "archive" child before configuring the bean. */
107 | beanConfiguration.setConfiguration(beanConfiguration.getConfiguration(), "archive");
108 |
109 | try {
110 | beanConfigurator.configureBean(beanConfiguration);
111 | } catch (BeanConfigurationException e) {
112 | throw new MojoExecutionException("Error parsing archive configuration", e);
113 | }
114 |
115 | return archiveConfiguration.getManifest();
116 | }
117 |
118 | /*
119 | * This is a copy of MavenProject::getRuntimeClasspathElements() which makes
120 | * a defensive copy of project.getArtifacts() prior to iteration, to prevent
121 | * ConcurrentModificationExceptions.
122 | */
123 | private List classpathElements() {
124 | List list = new ArrayList<>(project.getArtifacts().size() + 1);
125 |
126 | String d = project.getBuild().getOutputDirectory();
127 | if (d != null) {
128 | list.add(d);
129 | }
130 |
/* Retry the defensive copy (with linear backoff) up to 10 times; another thread may mutate the artifact set mid-copy despite the lock. */
131 | Set artifactsCopy;
132 | int attempts = 0;
133 | while (true) {
134 | try {
135 | synchronized (ARTIFACTS_LOCK) {
136 | artifactsCopy = new LinkedHashSet<>(project.getArtifacts());
137 | break;
138 | }
139 | } catch (ConcurrentModificationException e) {
140 | if (++attempts > 10) {
141 | throw new RuntimeException("Failed to copy artifacts after 10 attempts", e);
142 | }
143 | // Brief pause before retry
144 | try {
145 | Thread.sleep(10L * attempts);
146 | } catch (InterruptedException ie) {
147 | Thread.currentThread().interrupt();
148 | throw new RuntimeException("Interrupted while retrying artifact copy", ie);
149 | }
150 | }
151 | }
152 |
153 | for (Artifact a : artifactsCopy) {
154 | if (
155 | a.getArtifactHandler().isAddedToClasspath() &&
156 | // TODO let the scope handler deal with this
157 | (Artifact.SCOPE_COMPILE.equals(a.getScope()) ||
158 | Artifact.SCOPE_RUNTIME.equals(a.getScope()))
159 | ) {
160 | File file = a.getFile();
161 | if (file != null) {
162 | list.add(file.getPath());
163 | }
164 | }
165 | }
166 | return list;
167 | }
168 |
/* Interpolates the configured classpath layout (simple / repository / custom) against artifact properties, supplying the extra expressions maven-archiver supports: baseVersion (overridden to the plain version for non-snapshots), groupIdPath, and dashClassifier / dashClassifier?. NOTE(review): for snapshots, baseVersion intentionally falls through to the artifact's own value so repository layouts keep the -SNAPSHOT base name -- confirm against maven-archiver. */
169 | private static Path computePath(Artifact artifact, ManifestConfiguration config)
170 | throws MojoExecutionException {
171 | String layoutType = config.getClasspathLayoutType();
172 | String layout = config.getCustomClasspathLayout();
173 |
174 | Interpolator interpolator = new StringSearchInterpolator();
175 |
176 | List valueSources = new ArrayList<>();
177 |
178 | valueSources.add(
179 | new PrefixedObjectValueSource(ARTIFACT_EXPRESSION_PREFIXES, artifact, true)
180 | );
181 | valueSources.add(
182 | new PrefixedObjectValueSource(
183 | ARTIFACT_EXPRESSION_PREFIXES,
184 | artifact.getArtifactHandler(),
185 | true
186 | )
187 | );
188 |
189 | Properties extraExpressions = new Properties();
190 | if (!artifact.isSnapshot()) {
191 | extraExpressions.setProperty("baseVersion", artifact.getVersion());
192 | }
193 |
194 | extraExpressions.setProperty("groupIdPath", artifact.getGroupId().replace('.', '/'));
195 | if (artifact.hasClassifier()) {
196 | extraExpressions.setProperty("dashClassifier", "-" + artifact.getClassifier());
197 | extraExpressions.setProperty("dashClassifier?", "-" + artifact.getClassifier());
198 | } else {
199 | extraExpressions.setProperty("dashClassifier", "");
200 | extraExpressions.setProperty("dashClassifier?", "");
201 | }
202 | valueSources.add(
203 | new PrefixedPropertiesValueSource(
204 | ARTIFACT_EXPRESSION_PREFIXES,
205 | extraExpressions,
206 | true
207 | )
208 | );
209 |
210 | for (ValueSource vs : valueSources) {
211 | interpolator.addValueSource(vs);
212 | }
213 |
214 | RecursionInterceptor recursionInterceptor = new PrefixAwareRecursionInterceptor(
215 | ARTIFACT_EXPRESSION_PREFIXES
216 | );
217 |
218 | try {
219 | boolean useUniqueVersionsLayout = config.isUseUniqueVersions();
220 |
/* Resolve the layout template string; the unique/non-unique variants differ in how snapshot versions are rendered. */
221 | final String resolvedLayout;
222 | switch (layoutType) {
223 | case ManifestConfiguration.CLASSPATH_LAYOUT_TYPE_SIMPLE:
224 | resolvedLayout =
225 | useUniqueVersionsLayout ? SIMPLE_LAYOUT : SIMPLE_LAYOUT_NONUNIQUE;
226 | break;
227 | case ManifestConfiguration.CLASSPATH_LAYOUT_TYPE_REPOSITORY:
228 | resolvedLayout =
229 | useUniqueVersionsLayout ? REPOSITORY_LAYOUT : REPOSITORY_LAYOUT_NONUNIQUE;
230 | break;
231 | case ManifestConfiguration.CLASSPATH_LAYOUT_TYPE_CUSTOM:
232 | resolvedLayout = layout;
233 | break;
234 | default:
235 | throw new MojoExecutionException(
236 | "Unknown classpath layout type: " + layoutType
237 | );
238 | }
239 |
240 | return Paths.get(interpolator.interpolate(resolvedLayout, recursionInterceptor));
241 | } catch (InterpolationException e) {
242 | throw new MojoExecutionException("Error computing path for classpath entry", e);
243 | }
244 | }
245 |
/* Linear scan for the project artifact whose resolved file equals this classpath entry; null when none matches. */
246 | private static Artifact findArtifactWithFile(Set artifacts, File file) {
247 | for (Artifact artifact : artifacts) {
248 | if (file.equals(artifact.getFile())) {
249 | return artifact;
250 | }
251 | }
252 |
253 | return null;
254 | }
255 | }
256 | --------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | --------------------------------------------------------------------------------