Add S3 bucket management and file upload/download features #2

Open · wants to merge 5 commits into main
5 changes: 5 additions & 0 deletions pom.xml
@@ -31,6 +31,11 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-log4j2</artifactId>
</dependency>
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
<version>2.25.70</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
45 changes: 45 additions & 0 deletions src/main/java/com/app/config/AwsS3Config.java
@@ -0,0 +1,45 @@
package com.app.config;


import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;

@Configuration
@ConditionalOnProperty(name = "aws.s3.enabled", havingValue = "true")
@Log4j2
public class AwsS3Config {

@Value("${aws.s3.region:us-east-1}")
private String awsRegion;
@Value("${aws.s3.accessKeyId:us-east-1}")
private String accessKeyId;
@Value("${aws.s3.secretAccessKey}")
private String secretAccessKey;

@Bean
public S3Client s3Client() {
try {
log.info("Creating S3Client.");
S3Client s3Client = S3Client.builder()
.region(Region.of(awsRegion))
.credentialsProvider(StaticCredentialsProvider.create(AwsBasicCredentials.create(accessKeyId, secretAccessKey)))
.overrideConfiguration(ClientOverrideConfiguration.builder()
.retryPolicy(r -> r.numRetries(3))
.build())
.build();
// Log success only once the client has actually been built
log.info("S3Client created successfully.");
return s3Client;
} catch (Exception e) {
log.error("Failed to create S3Client.", e);
throw e;
}
}
}
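
Side note on the config above: static keys in a properties file are fine for local runs, but the SDK can also resolve credentials from environment variables, ~/.aws/credentials, or an instance role. A minimal sketch of that variant is below; the class name is hypothetical and the code is not part of this PR, it only assumes the AWS SDK v2 dependency already added in pom.xml.

package com.app.config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;

// Hypothetical alternative (not in this PR): let the SDK's default credential chain
// pick up credentials from env vars, ~/.aws/credentials, or an instance/role profile.
@Configuration
public class AwsS3DefaultChainConfig {

    @Bean
    public S3Client s3Client() {
        return S3Client.builder()
                .region(Region.US_EAST_1) // could also bind ${aws.s3.region} as above
                .credentialsProvider(DefaultCredentialsProvider.create())
                .build();
    }
}
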
88 changes: 88 additions & 0 deletions src/main/java/com/app/controller/S3Controller.java
@@ -0,0 +1,88 @@
package com.app.controller;

import com.app.service.S3Service;
import lombok.RequiredArgsConstructor;
import org.springframework.core.io.InputStreamResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.mvc.method.annotation.StreamingResponseBody;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipOutputStream;

@RestController
@RequiredArgsConstructor
@RequestMapping("/api/files")
public class S3Controller {

private final S3Service s3Service;

@PostMapping("/upload")
public ResponseEntity<String> uploadFile(@RequestPart("file") MultipartFile file,
@RequestParam(value = "isReadPublicly", defaultValue = "false") boolean isReadPublicly) {
boolean isUploaded = s3Service.uploadFile(file, isReadPublicly);
if (isUploaded) {
return ResponseEntity.ok("File uploaded successfully: " + file.getOriginalFilename());
} else {
return ResponseEntity.status(500).body("Failed to upload file: " + file.getOriginalFilename());
}
}

@GetMapping("/download/{key}")
public ResponseEntity<InputStreamResource> downloadFile(@PathVariable String key) {
InputStream fileStream = s3Service.downloadFileAsStream(key);
InputStreamResource resource = new InputStreamResource(fileStream);
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + key + "\"")
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(resource);
}

@GetMapping("/create-bucket")
public ResponseEntity<String> createBucket(@RequestParam String bucketName) {
return ResponseEntity.ok(s3Service.createBucket(bucketName));
}

@GetMapping("/bucket-list")
public ResponseEntity<List<String>> getBucketList() {
return ResponseEntity.ok(s3Service.getBucketList());
}

@GetMapping("/list-buckets-with-regions")
public Map<String, String> listBucketsWithRegions() {
return s3Service.listBucketsWithRegions();
}

@GetMapping("/download-all-files-zip")
public ResponseEntity<StreamingResponseBody> downloadAllFilesAsZip(@RequestParam String bucketName) {

// Streaming response to handle large files efficiently
StreamingResponseBody responseBody = outputStream -> {
try (ZipOutputStream zos = new ZipOutputStream(outputStream)) {
s3Service.streamAllFilesAsZip(bucketName, zos);
} catch (IOException e) {
throw new RuntimeException("Error while streaming files to output stream", e);
}
};

// Set headers for ZIP file download
HttpHeaders headers = new HttpHeaders();
headers.add("Content-Disposition", "attachment; filename=all-files.zip");
headers.add("Content-Type", "application/zip");

return new ResponseEntity<>(responseBody, headers, HttpStatus.OK);
}

@GetMapping("/move-files")
public String moveFiles(@RequestParam String sourceBucketName, @RequestParam String destinationBucketName) {
s3Service.moveFiles(sourceBucketName, destinationBucketName);
return "Files are being moved from " + sourceBucketName + " to " + destinationBucketName;
}
}
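
For reviewers who want to exercise the upload endpoint without real AWS credentials, a minimal slice-test sketch follows; the test class, the mocked S3Service behaviour, and the sample payload are illustrative assumptions rather than files in this PR, and it relies only on spring-boot-starter-test, which is already on the classpath.

package com.app.controller;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import com.app.service.S3Service;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.test.web.servlet.MockMvc;

// Illustrative sketch only: exercises POST /api/files/upload against a mocked service.
@WebMvcTest(S3Controller.class)
class S3ControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private S3Service s3Service;

    @Test
    void uploadReturns200WhenServiceReportsSuccess() throws Exception {
        when(s3Service.uploadFile(any(), anyBoolean())).thenReturn(true);

        MockMultipartFile file = new MockMultipartFile(
                "file", "hello.txt", "text/plain", "hello".getBytes());

        mockMvc.perform(multipart("/api/files/upload")
                        .file(file)
                        .param("isReadPublicly", "false"))
                .andExpect(status().isOk());
    }
}
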
25 changes: 25 additions & 0 deletions src/main/java/com/app/service/S3Service.java
@@ -0,0 +1,25 @@
package com.app.service;

import org.springframework.web.multipart.MultipartFile;

import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipOutputStream;

public interface S3Service {

boolean uploadFile(MultipartFile file, boolean isReadPublicly);

InputStream downloadFileAsStream(String key);

String createBucket(String bucketName);

List<String> getBucketList() throws RuntimeException;

Map<String, String> listBucketsWithRegions();

void streamAllFilesAsZip(String bucketName, ZipOutputStream zos);

void moveFiles(String sourceBucketName, String destinationBucketName);
}
254 changes: 254 additions & 0 deletions src/main/java/com/app/service/S3ServiceImpl.java
@@ -0,0 +1,254 @@
package com.app.service;

import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import software.amazon.awssdk.core.ResponseBytes;
import software.amazon.awssdk.core.ResponseInputStream;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.*;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

@Service
@Log4j2
@RequiredArgsConstructor
public class S3ServiceImpl implements S3Service {

private final S3Client s3Client;
private static final int THREAD_POOL_SIZE = 10; // Number of threads for concurrent execution


@Value("${aws.s3.bucket-name}")
private String bucketName;


@Override
public boolean uploadFile(MultipartFile file, boolean isReadPublicly) {
log.info("Started uploading file '{}' to S3 Bucket '{}'", file.getOriginalFilename(), bucketName);
PutObjectRequest putObjectRequest;
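// Attach the public-read canned ACL only when the caller explicitly opts in; otherwise the bucket's default policy applies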
if (isReadPublicly) {
putObjectRequest = PutObjectRequest.builder()
.bucket(bucketName)
.key(file.getOriginalFilename()).acl("public-read")
.build();
} else {
putObjectRequest = PutObjectRequest.builder()
.bucket(bucketName)
.key(file.getOriginalFilename())
.build();
}
try {
s3Client.putObject(putObjectRequest, RequestBody.fromBytes(file.getBytes()));
log.info("Successfully uploaded file to S3. Bucket: {}, Key: {}", bucketName, file.getOriginalFilename());
return true;
} catch (Exception e) {
log.error("Failed to upload file to S3. Bucket: {}, Key: {}", bucketName, file.getOriginalFilename(), e);
return false;
}
}

@Override
public InputStream downloadFileAsStream(String key) {
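// getObjectAsBytes buffers the entire object in memory, so this path is best suited to small and medium-sized objects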
try {
GetObjectRequest getObjectRequest = GetObjectRequest.builder()
.bucket(bucketName)
.key(key)
.build();

ResponseBytes<GetObjectResponse> getObjectResponse = s3Client.getObjectAsBytes(getObjectRequest);
if (getObjectResponse == null) {
log.warn("Failed to get file from S3 bucket: Response is null");
return new ByteArrayInputStream(new byte[0]);
}

log.info("Successfully getting file in bytes from S3 bucket.");
byte[] fileBytes = getObjectResponse.asByteArray();
return new ByteArrayInputStream(fileBytes);

} catch (S3Exception e) {
log.error("Failed to fetch object from S3 Bucket: {}, Key: {}", bucketName, key, e);
throw e;
} catch (SdkException e) {
log.error("Error while downloading file from S3 Bucket: {}, Key: {}", bucketName, key, e);
throw e;
}
}

@Override
public String createBucket(String bucketName) {
try {
CreateBucketRequest createBucketRequest = CreateBucketRequest.builder()
.bucket(bucketName)
.build();
CreateBucketResponse createBucketResponse = s3Client.createBucket(createBucketRequest);
return "Bucket created successfully: " + createBucketResponse.location();
} catch (S3Exception e) {
throw new RuntimeException("Failed to create bucket: " + e.awsErrorDetails().errorMessage(), e);
}
}

@Override
public List<String> getBucketList() throws RuntimeException {
try {
ListBucketsResponse listBucketsResponse = s3Client.listBuckets();
return listBucketsResponse.buckets().stream()
.map(Bucket::name)
.toList();
} catch (S3Exception e) {
throw new RuntimeException("Failed to list buckets: " + e.awsErrorDetails().errorMessage(), e);
}
}

@Override
public Map<String, String> listBucketsWithRegions() {
try {
ListBucketsResponse listBucketsResponse = s3Client.listBuckets();

// Create a map to store bucket names with their respective regions
Map<String, String> bucketRegions = new HashMap<>();

for (var bucket : listBucketsResponse.buckets()) {
String bucketRegion = getBucketRegion(bucket.name());
bucketRegions.put(bucket.name(), bucketRegion);
}

return bucketRegions;

} catch (S3Exception e) {
throw new RuntimeException("Failed to list buckets: " + e.awsErrorDetails().errorMessage(), e);
}
}

private String getBucketRegion(String bucketName) {
try {
GetBucketLocationRequest locationRequest = GetBucketLocationRequest.builder()
.bucket(bucketName)
.build();

GetBucketLocationResponse locationResponse = s3Client.getBucketLocation(locationRequest);

// Translate the bucket location constraint to a region name
Region region = locationResponse.locationConstraintAsString() == null ? Region.US_EAST_1 :
Region.of(locationResponse.locationConstraintAsString());

return region.id();
} catch (S3Exception e) {
return "Unknown"; // Handle the case where the region is not accessible or available
}
}

@Override
public void streamAllFilesAsZip(String bucketName, ZipOutputStream zos) {
ListObjectsV2Request listObjectsRequest = ListObjectsV2Request.builder()
.bucket(bucketName)
.build();

ListObjectsV2Response listObjectsResponse;
do {
listObjectsResponse = s3Client.listObjectsV2(listObjectsRequest);
List<S3Object> objects = listObjectsResponse.contents();

for (S3Object object : objects) {
addFileToZipStream(bucketName, object.key(), zos);
}

// Advance to the next page; without the continuation token a truncated listing would repeat the same page
listObjectsRequest = listObjectsRequest.toBuilder()
.continuationToken(listObjectsResponse.nextContinuationToken())
.build();

} while (listObjectsResponse.isTruncated());
}

private void addFileToZipStream(String bucketName, String keyName, ZipOutputStream zos) {
GetObjectRequest getObjectRequest = GetObjectRequest.builder()
.bucket(bucketName)
.key(keyName)
.build();

try (ResponseInputStream<?> s3ObjectStream = s3Client.getObject(getObjectRequest)) {
zos.putNextEntry(new ZipEntry(keyName));

byte[] buffer = new byte[1024];
int length;
while ((length = s3ObjectStream.read(buffer)) > 0) {
zos.write(buffer, 0, length);
}

zos.closeEntry();
} catch (IOException | S3Exception e) {
throw new RuntimeException("Failed to add file to ZIP: " + keyName, e);
}
}

@Override
public void moveFiles(String sourceBucketName, String destinationBucketName) {
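// A fixed-size pool bounds how many copy/delete calls hit S3 concurrently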
ExecutorService executorService = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
ListObjectsV2Request listObjectsRequest = ListObjectsV2Request.builder()
.bucket(sourceBucketName)
.build();

try {
ListObjectsV2Response listObjectsResponse;
do {
listObjectsResponse = s3Client.listObjectsV2(listObjectsRequest);
List<S3Object> objects = listObjectsResponse.contents();

for (S3Object object : objects) {
String keyName = object.key();
// Submit the copy and delete tasks to be executed concurrently
executorService.submit(() -> copyAndDeleteObject(sourceBucketName, destinationBucketName, keyName));
}

// Advance to the next page of the listing before checking for truncation
listObjectsRequest = listObjectsRequest.toBuilder()
.continuationToken(listObjectsResponse.nextContinuationToken())
.build();

} while (listObjectsResponse.isTruncated());

} catch (S3Exception e) {
log.error("Failed to list objects from bucket: {} - {}", sourceBucketName, e.getMessage());
} finally {
executorService.shutdown();
try {
if (!executorService.awaitTermination(60, TimeUnit.SECONDS)) {
executorService.shutdownNow();
}
} catch (InterruptedException e) {
executorService.shutdownNow();
Thread.currentThread().interrupt();
}
}
}

private void copyAndDeleteObject(String sourceBucketName, String destinationBucketName, String keyName) {
try {
// Copy file to the destination bucket
CopyObjectRequest copyRequest = CopyObjectRequest.builder()
.sourceBucket(sourceBucketName)
.sourceKey(keyName)
.destinationBucket(destinationBucketName)
.destinationKey(keyName)
.build();
s3Client.copyObject(copyRequest);
log.info("Copied file: {} from {} to {}", keyName, sourceBucketName, destinationBucketName);

// Delete file from the source bucket
DeleteObjectRequest deleteRequest = DeleteObjectRequest.builder()
.bucket(sourceBucketName)
.key(keyName)
.build();
s3Client.deleteObject(deleteRequest);
log.info("Deleted file: {} from {}", keyName, sourceBucketName);

} catch (S3Exception e) {
log.error("Error while moving file: {} - {}", keyName, e.getMessage());
}
}
}
7 changes: 7 additions & 0 deletions src/main/resources/application-local.yml
@@ -2,3 +2,10 @@ app:
logs:
path: C:/${spring.application.name}/logs

aws:
s3:
enabled: true
region: us-east-1
accessKeyId: <<access key>>
secretAccessKey: <<secret access key>>
bucket-name: <<enter bucket name>>