From fe6146a7f2dadddde03ae71633b582f503db7f38 Mon Sep 17 00:00:00 2001
From: Josh Tomar
Date: Tue, 9 May 2023 14:22:49 -0700
Subject: [PATCH] Added minimalist support for an S3 backend that makes use of
 a publicly readable S3 bucket. Terraform project included at
 src/static/terraform to describe the S3 bucket and attached policies.

---
 src/Classes/Upload.php                | 55 ++++++++++++++++++---------
 src/composer.json                     |  5 ++-
 src/config.json                       |  5 +++
 src/static/terraform/main.tf          | 53 ++++++++++++++++++++++++++
 src/static/terraform/terraform.tfvars |  2 +
 5 files changed, 101 insertions(+), 19 deletions(-)
 create mode 100644 src/static/terraform/main.tf
 create mode 100644 src/static/terraform/terraform.tfvars

diff --git a/src/Classes/Upload.php b/src/Classes/Upload.php
index e7becc0..6c396e9 100644
--- a/src/Classes/Upload.php
+++ b/src/Classes/Upload.php
@@ -17,8 +17,10 @@
  * You should have received a copy of the GNU General Public License
  * along with this program.  If not, see <https://www.gnu.org/licenses/>.
  */
-
 namespace Pomf\Uguu\Classes;
+require '../vendor/autoload.php';
+use Aws\S3\S3Client;
+use Aws\Exception\AwsException;
 
 class Upload extends Response
 {
@@ -152,28 +154,47 @@
         // If its not a dupe then skip checking if file can be written and
         // skip inserting it into the DB.
         if (!$this->FILE_INFO['DUPE']) {
-            if (!is_dir($this->Connector->CONFIG['FILES_ROOT'])) {
-                $this->Connector->response->error(500, 'File storage path not accessible.');
+            if (!$this->Connector->CONFIG['FILES_OBJ']) {
+                if (!is_dir($this->Connector->CONFIG['FILES_ROOT'])) {
+                    $this->Connector->response->error(500, 'File storage path not accessible.');
+                }
+                if (
+                    !move_uploaded_file(
+                        $this->FILE_INFO['TEMP_NAME'],
+                        $this->Connector->CONFIG['FILES_ROOT'] .
+                        $this->FILE_INFO['FILENAME'],
+                    )
+                ) {
+                    $this->Connector->response->error(500, 'Failed to move file to destination.');
+                }
+                if (!chmod($this->Connector->CONFIG['FILES_ROOT'] . $this->FILE_INFO['FILENAME'], 0644)) {
+                    $this->Connector->response->error(500, 'Failed to change file permissions.');
+                }
+                $this->Connector->newIntoDB($this->FILE_INFO, $this->fingerPrintInfo);
+                $url = 'https://' . $this->Connector->CONFIG['FILE_DOMAIN'] . '/' . $this->FILE_INFO['FILENAME'];
+            }
+            // S3/Object Store upload
+            else {
+                $s3Client = new S3Client([
+                    'profile' => $this->Connector->CONFIG['FILES_OBJ_PROFILE'],
+                    'region' => $this->Connector->CONFIG['FILES_OBJ_REGION'],
+                    'version' => "2006-03-01"
+                ]);
+                $result = $s3Client->putObject([
+                    'Bucket' => $this->Connector->CONFIG['FILES_OBJ_BUCKET'],
+                    'Key' => $this->FILE_INFO['FILENAME'],
+                    'SourceFile' => $this->FILE_INFO['TEMP_NAME'],
+                    'ContentType' => $this->FILE_INFO['MIME'],
+                    'ContentDisposition' => 'inline; filename=' . $this->FILE_INFO['FILENAME']
+                ]);
+                $url = $s3Client->getObjectUrl($this->Connector->CONFIG['FILES_OBJ_BUCKET'], $this->FILE_INFO['FILENAME']);
             }
-            if (
-                !move_uploaded_file(
-                    $this->FILE_INFO['TEMP_NAME'],
-                    $this->Connector->CONFIG['FILES_ROOT'] .
-                    $this->FILE_INFO['FILENAME'],
-                )
-            ) {
-                $this->Connector->response->error(500, 'Failed to move file to destination.');
-            }
-            if (!chmod($this->Connector->CONFIG['FILES_ROOT'] . $this->FILE_INFO['FILENAME'], 0644)) {
-                $this->Connector->response->error(500, 'Failed to change file permissions.');
-            }
-            $this->Connector->newIntoDB($this->FILE_INFO, $this->fingerPrintInfo);
         }
         return [
             'hash' => $this->FILE_INFO['SHA1'],
             'name' => $this->FILE_INFO['NAME'],
             'filename' => $this->FILE_INFO['FILENAME'],
-            'url' => 'https://' . $this->Connector->CONFIG['FILE_DOMAIN'] . '/' . $this->FILE_INFO['FILENAME'],
+            'url' => $url,
             'size' => $this->FILE_INFO['SIZE'],
             'dupe' => $this->FILE_INFO['DUPE'],
         ];
diff --git a/src/composer.json b/src/composer.json
index ea1b94d..97f0848 100644
--- a/src/composer.json
+++ b/src/composer.json
@@ -18,10 +18,11 @@
     "minimum-stability": "stable",
     "require": {
         "ext-fileinfo": "*",
-        "ext-pdo": "*"
+        "ext-pdo": "*",
+        "aws/aws-sdk-php": "^3.269"
     },
     "config": {
         "optimize-autoloader": true,
         "classmap-authoritative": true
     }
-}
\ No newline at end of file
+}
diff --git a/src/config.json b/src/config.json
index 722255d..b891ed0 100755
--- a/src/config.json
+++ b/src/config.json
@@ -38,6 +38,10 @@
     "RATE_LIMIT": false,
     "RATE_LIMIT_TIMEOUT": 60,
     "RATE_LIMIT_FILES": 100,
+    "FILES_OBJ": false,
+    "FILES_OBJ_BUCKET": "my_bucket",
+    "FILES_OBJ_REGION": "my_aws_region",
+    "FILES_OBJ_PROFILE": "default",
     "FILES_ROOT": "/var/www/files/",
     "FILES_RETRIES": 15,
     "NAME_LENGTH": 8,
@@ -75,3 +79,4 @@
         "image/svg+xml"
     ]
 }
+
diff --git a/src/static/terraform/main.tf b/src/static/terraform/main.tf
new file mode 100644
index 0000000..dc8c386
--- /dev/null
+++ b/src/static/terraform/main.tf
@@ -0,0 +1,53 @@
+provider "aws" {}
+
+variable "uguu_bucket_name" {
+  type        = string
+  description = "Bucket name to be used for the Uguu storage backend"
+}
+
+variable "retention_days" {
+  type        = number
+  description = "Number of days for the lifecycle policy to retain files before deleting them"
+  default     = 2
+}
+
+resource "aws_s3_bucket" "uguu_bucket" {
+  bucket = var.uguu_bucket_name
+}
+
+resource "aws_s3_bucket_lifecycle_configuration" "uguu_lc_policy" {
+  bucket = aws_s3_bucket.uguu_bucket.id
+  rule {
+    id     = "delete-after-x-days"
+    status = "Enabled"
+    expiration {
+      days = var.retention_days
+    }
+  }
+}
+
+resource "aws_s3_bucket_public_access_block" "uguu_public_block_policy" {
+  bucket = aws_s3_bucket.uguu_bucket.id
+}
+
+resource "aws_s3_bucket_policy" "uguu_bucket_policy" {
+  bucket = aws_s3_bucket.uguu_bucket.id
+  policy = data.aws_iam_policy_document.allow_public_access.json
+}
+
+data "aws_iam_policy_document" "allow_public_access" {
+  statement {
+    principals {
+      type        = "*"
+      identifiers = ["*"]
+    }
+
+    actions = [
+      "s3:GetObject"
+    ]
+
+    resources = [
+      "${aws_s3_bucket.uguu_bucket.arn}/*"
+    ]
+  }
+}
\ No newline at end of file
diff --git a/src/static/terraform/terraform.tfvars b/src/static/terraform/terraform.tfvars
new file mode 100644
index 0000000..9f75dae
--- /dev/null
+++ b/src/static/terraform/terraform.tfvars
@@ -0,0 +1,2 @@
+uguu_bucket_name = "YOUR_BUCKET_NAME"
+retention_days = 2