├── .gitignore ├── README.md ├── index.php ├── page.htm └── s3upload.js /.gitignore: -------------------------------------------------------------------------------- 1 | composer.* 2 | keys.php 3 | vendor/* 4 | 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # AWS S3 Multipart Upload in Browser 2 | ## What is this? 3 | Uploading files from the browser directly to S3 is needed in many applications. Fine-grained authorization is handled by the server, and the browser only handles file upload. This library does that. 4 | 5 | ## What's so special? 6 | Unfortunately S3 does not allow uploading files larger than 5GB in one chunk, and all the examples in AWS docs either support one chunk, or support **multipart uploads** only on the server. 7 | 8 | As we don't want to proxy the upload traffic to a server (which negates the whole purpose of using S3), we need an S3 multipart upload solution from the browser. 9 | 10 | The code by [s3-multipart-upload-browser](https://github.com/ienzam/s3-multipart-upload-browser) does some of that, but many of its features don't work. Most notably, it does not support AWS API V3. 11 | 12 | ## Features 13 | 14 | This uploader supports upload prefixes (uploading only in a certain directory), fine-grained feedback, parallelized chunks, cancellation of the upload (and removal of the parts on S3), automated parts cleanup, drag & drop, progress bar and chunk size management. 15 | 16 | ## Requirements 17 | - The browser Javascript code relies on jQuery. It also requires a browser with Blob, File and XHR2 support (most do since 2012). 18 | - The server-side code is in PHP, but is straightforward enough to port to any language and is less than 200 LOC. 19 | - If using with PHP, it needs AWS credentials and the AWS PHP SDK V3+ as well. 20 | 21 | That's all! 
22 | 23 | ## Installation 24 | Just put all files in a web directory, set the AWS credentials in index.php or in keys.php file, set your bucket's CORS config (can be done via a function in the code too), and you're ready. 25 | 26 | ## Notes 27 | I strongly recommend setting your S3 bucket for [auto-removal of unfinished multipart upload parts](https://aws.amazon.com/blogs/aws/s3-lifecycle-management-update-support-for-multipart-uploads-and-delete-markers/). Otherwise, any incomplete upload will leave useless files on your bucket, for which you will be charged. 28 | 29 | I also suggest using the largest supported chunk size (5GB) to make the XHR connections minimal. Uploading a 200GB file with 5GB chunks needs 40 parts, but using 100MB chunks requires 2000. Authorizing parts with AWS is both slow and pricy. 30 | 31 | ## License 32 | This work is released under MIT license. 33 | -------------------------------------------------------------------------------- /index.php: -------------------------------------------------------------------------------- 1 | /* NOTE(review): this dump lost index.php's opening lines (the <?php tag and the definitions/signatures of s3(), bucket(), prefix(), aws_key(), aws_secret()); the fragment resumes mid-way through the S3Client constructor options. */ 'latest', 49 | 'region' => 'us-east-1', 50 | 'signature_version' => 'v4', 51 | 'credentials' => [ 52 | 'key' => aws_key(), 53 | 'secret' => aws_secret(), 54 | ] 55 | ]); 56 | if ($command===null) 57 | return $s3; 58 | $args=func_get_args(); 59 | array_shift($args); 60 | try { 61 | $res=call_user_func_array([$s3,$command],$args); 62 | return $res; 63 | } 64 | catch (AwsException $e) 65 | { 66 | echo $e->getMessage(),PHP_EOL; 67 | } 68 | return null; /* NOTE(review): on AwsException the message is echoed and null is returned — callers must handle a null result */ 69 | } 70 | /** 71 | * Output data as json with proper header 72 | * @param mixed $data 73 | */ 74 | function json_output($data) 75 | { 76 | header('Content-Type: application/json'); 77 | die(json_encode($data)); /* die() terminates the request here, so the encoded payload is the entire response body */ 78 | } 79 | /** 80 | * Deletes all multipart uploads that are not completed.
81 | * 82 | * Useful to clear up the clutter from your bucket 83 | * You can also set the bucket to delete them every day 84 | * @return integer number of deleted objects 85 | */ 86 | function abortPendingUploads($bucket) 87 | { 88 | $count=0; 89 | $res=s3("listMultipartUploads",["Bucket"=>$bucket]); /* FIX(review): was bucket() — the $bucket parameter was ignored, so a caller passing a different bucket listed the wrong one */ 90 | if (is_array($res["Uploads"])) 91 | foreach ($res["Uploads"] as $item) 92 | { 93 | 94 | $r=s3("abortMultipartUpload",[ 95 | "Bucket"=>$bucket, 96 | "Key"=>$item["Key"], 97 | "UploadId"=>$item["UploadId"], 98 | ]); 99 | $count++; 100 | } 101 | return $count; 102 | } 103 | /** 104 | * Enables CORS on bucket 105 | * 106 | * This needs to be called exactly once on a bucket before browser uploads. 107 | * @param string $bucket 108 | */ 109 | function setCORS($bucket) 110 | { 111 | $res=s3("getBucketCors",["Bucket"=>$bucket]); 112 | $res=s3("putBucketCors", 113 | [ 114 | "Bucket"=>$bucket, 115 | "CORSConfiguration"=>[ 116 | "CORSRules"=>[ 117 | [ 118 | 'AllowedHeaders'=>['*'], 119 | 'AllowedMethods'=> ['POST','GET','HEAD','PUT'], 120 | "AllowedOrigins"=>["*"], /* FIX(review): removed "localhost" — a CORS origin must include a scheme (e.g. http://localhost), so the bare string was invalid, and "*" already covers every origin */ 121 | ], 122 | ], 123 | ], 124 | ]); 125 | } 126 | 127 | if (isset($_POST['command'])) 128 | { /* AJAX command dispatcher: the browser client POSTs command=create|part|complete|abort */ 129 | $command=$_POST['command']; 130 | if ($command=="create") 131 | { 132 | $res=s3("createMultipartUpload",[ 133 | 'Bucket' => bucket(), 134 | 'Key' => prefix().basename($_POST['fileInfo']['name']), /* FIX(review): basename() blocks path traversal — a client-supplied name like "../x" could otherwise escape the upload prefix */ 135 | 'ContentType' => $_REQUEST['fileInfo']['type'], 136 | 'Metadata' => $_REQUEST['fileInfo'] 137 | ]); 138 | json_output(array( 139 | 'uploadId' => $res->get('UploadId'), 140 | 'key' => $res->get('Key'), 141 | )); 142 | } 143 | 144 | if ($command=="part") 145 | { 146 | $command=s3("getCommand","UploadPart",[ 147 | 'Bucket' => bucket(), 148 | 'Key' => $_REQUEST['sendBackData']['key'], 149 | 'UploadId' => $_REQUEST['sendBackData']['uploadId'], 150 | 'PartNumber' => $_REQUEST['partNumber'], 151 | 'ContentLength' => $_REQUEST['contentLength'] 152 | ]); 153 | 154 | // Give it at least 24 hours for large uploads 155 | 
$request=s3("createPresignedRequest",$command,"+48 hours"); 156 | json_output([ 157 | 'url' => (string)$request->getUri(), 158 | ]); 159 | } 160 | 161 | if ($command=="complete") 162 | { /* NOTE(review): listParts returns at most 1000 parts per call; an upload with more parts would be completed from a truncated list — pagination via PartNumberMarker/IsTruncated is needed for that case. TODO confirm against the client's maximum part count. */ 163 | $partsModel = s3("listParts",[ 164 | 'Bucket' => bucket(), 165 | 'Key' => $_REQUEST['sendBackData']['key'], 166 | 'UploadId' => $_REQUEST['sendBackData']['uploadId'], 167 | ]); 168 | $model = s3("completeMultipartUpload",[ 169 | 'Bucket' => bucket(), 170 | 'Key' => $_REQUEST['sendBackData']['key'], 171 | 'UploadId' => $_REQUEST['sendBackData']['uploadId'], 172 | 'MultipartUpload' => [ 173 | "Parts"=>$partsModel["Parts"], /* NOTE(review): listParts entries carry extra keys besides ETag/PartNumber — presumably the SDK ignores them when serializing; verify */ 174 | ], 175 | ]); 176 | json_output([ 177 | 'success' => true 178 | ]); 179 | } 180 | if ($command=="abort") 181 | { 182 | $model = s3("abortMultipartUpload",[ 183 | 'Bucket' => bucket(), 184 | 'Key' => $_REQUEST['sendBackData']['key'], 185 | 'UploadId' => $_REQUEST['sendBackData']['uploadId'] 186 | ]); 187 | json_output([ 188 | 'success' => true 189 | ]); 190 | } 191 | 192 | exit(0); /* every POSTed command ends here; only plain page requests fall through to render page.htm below */ 193 | } 194 | 195 | 196 | include "page.htm"; -------------------------------------------------------------------------------- /page.htm: -------------------------------------------------------------------------------- 1 |
You can upload files to S3 here.
98 | 117 |