CE-1772: S3 updates to allow content-type specifications, among other things

This commit is contained in:
Tim Chamberlain
2024-11-03 21:34:50 -06:00
parent efe89c7043
commit ce50120234
5 changed files with 61 additions and 26 deletions

View File

@ -28,7 +28,6 @@ import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -468,7 +467,8 @@ public class QValueFormatter
{
for(QFieldMetaData field : table.getFields().values())
{
if(field.getType().equals(QFieldType.BLOB))
Optional<FieldAdornment> fileDownloadAdornment = field.getAdornment(AdornmentType.FILE_DOWNLOAD);
if(fileDownloadAdornment.isPresent())
{
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// file name comes from: //
@ -478,20 +478,7 @@ public class QValueFormatter
// - tableLabel primaryKey fieldLabel //
// - and - if the FILE_DOWNLOAD adornment had a DEFAULT_EXTENSION, then it gets added (preceded by a dot) //
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
Optional<FieldAdornment> fileDownloadAdornment = field.getAdornment(AdornmentType.FILE_DOWNLOAD);
Map<String, Serializable> adornmentValues = Collections.emptyMap();
if(fileDownloadAdornment.isPresent())
{
adornmentValues = fileDownloadAdornment.get().getValues();
}
else
{
///////////////////////////////////////////////////////
// don't change blobs unless they are file-downloads //
///////////////////////////////////////////////////////
continue;
}
Map<String, Serializable> adornmentValues = fileDownloadAdornment.get().getValues();
String fileNameField = ValueUtils.getValueAsString(adornmentValues.get(AdornmentType.FileDownloadValues.FILE_NAME_FIELD));
String fileNameFormat = ValueUtils.getValueAsString(adornmentValues.get(AdornmentType.FileDownloadValues.FILE_NAME_FORMAT));
@ -542,7 +529,13 @@ public class QValueFormatter
}
}
record.setValue(field.getName(), "/data/" + table.getName() + "/" + primaryKey + "/" + field.getName() + "/" + fileName);
/////////////////////////////////////////////
// if field type is blob, update its value //
/////////////////////////////////////////////
if(QFieldType.BLOB.equals(field.getType()))
{
record.setValue(field.getName(), "/data/" + table.getName() + "/" + primaryKey + "/" + field.getName() + "/" + fileName);
}
record.setDisplayValue(field.getName(), fileName);
}
}

View File

@ -31,6 +31,7 @@ import com.kingsrook.qqq.backend.core.model.actions.AbstractTableActionInput;
public class StorageInput extends AbstractTableActionInput
{
private String reference;
private String contentType;
@ -74,4 +75,35 @@ public class StorageInput extends AbstractTableActionInput
return (this);
}
/*******************************************************************************
** Getter for contentType
*******************************************************************************/
public String getContentType()
{
return (this.contentType);
}
/*******************************************************************************
** Setter for contentType
*******************************************************************************/
public void setContentType(String contentType)
{
this.contentType = contentType;
}
/*******************************************************************************
** Fluent setter for contentType
*******************************************************************************/
public StorageInput withContentType(String contentType)
{
this.contentType = contentType;
return (this);
}
}

View File

@ -58,7 +58,7 @@ public class S3StorageAction extends AbstractS3Action implements QStorageInterfa
AmazonS3 amazonS3 = getS3Utils().getAmazonS3();
String fullPath = getFullPath(storageInput);
S3UploadOutputStream s3UploadOutputStream = new S3UploadOutputStream(amazonS3, backend.getBucketName(), fullPath);
S3UploadOutputStream s3UploadOutputStream = new S3UploadOutputStream(amazonS3, backend.getBucketName(), fullPath, storageInput.getContentType());
return (s3UploadOutputStream);
}
catch(Exception e)

View File

@ -53,6 +53,7 @@ public class S3UploadOutputStream extends OutputStream
private final AmazonS3 amazonS3;
private final String bucketName;
private final String key;
private final String contentType;
private byte[] buffer = new byte[5 * 1024 * 1024];
private int offset = 0;
@ -68,11 +69,12 @@ public class S3UploadOutputStream extends OutputStream
** Constructor
**
*******************************************************************************/
public S3UploadOutputStream(AmazonS3 amazonS3, String bucketName, String key)
public S3UploadOutputStream(AmazonS3 amazonS3, String bucketName, String key, String contentType)
{
this.amazonS3 = amazonS3;
this.bucketName = bucketName;
this.key = key;
this.contentType = contentType;
}
@ -96,6 +98,9 @@ public class S3UploadOutputStream extends OutputStream
*******************************************************************************/
private void uploadIfNeeded()
{
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentType(this.contentType);
if(offset == buffer.length)
{
//////////////////////////////////////////
@ -104,7 +109,8 @@ public class S3UploadOutputStream extends OutputStream
if(initiateMultipartUploadResult == null)
{
LOG.info("Initiating a multipart upload", logPair("key", key));
initiateMultipartUploadResult = amazonS3.initiateMultipartUpload(new InitiateMultipartUploadRequest(bucketName, key));
initiateMultipartUploadResult = amazonS3.initiateMultipartUpload(new InitiateMultipartUploadRequest(bucketName, key, objectMetadata));
uploadPartResultList = new ArrayList<>();
}
@ -115,7 +121,8 @@ public class S3UploadOutputStream extends OutputStream
.withInputStream(new ByteArrayInputStream(buffer))
.withBucketName(bucketName)
.withKey(key)
.withPartSize(buffer.length);
.withPartSize(buffer.length)
.withObjectMetadata(objectMetadata);
uploadPartResultList.add(amazonS3.uploadPart(uploadPartRequest));
@ -166,6 +173,9 @@ public class S3UploadOutputStream extends OutputStream
return;
}
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentType(this.contentType);
if(initiateMultipartUploadResult != null)
{
if(offset > 0)
@ -180,7 +190,8 @@ public class S3UploadOutputStream extends OutputStream
.withInputStream(new ByteArrayInputStream(buffer, 0, offset))
.withBucketName(bucketName)
.withKey(key)
.withPartSize(offset);
.withPartSize(offset)
.withObjectMetadata(objectMetadata);
uploadPartResultList.add(amazonS3.uploadPart(uploadPartRequest));
}
@ -194,7 +205,6 @@ public class S3UploadOutputStream extends OutputStream
else
{
LOG.info("Putting object (non-multipart)", logPair("key", key), logPair("length", offset));
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setContentLength(offset);
PutObjectResult putObjectResult = amazonS3.putObject(bucketName, key, new ByteArrayInputStream(buffer, 0, offset), objectMetadata);
}

View File

@ -31,7 +31,7 @@ import org.junit.jupiter.api.Test;
/*******************************************************************************
** Unit test for S3UploadOutputStream
** Unit test for S3UploadOutputStream
*******************************************************************************/
class S3UploadOutputStreamTest extends BaseS3Test
{
@ -57,11 +57,11 @@ class S3UploadOutputStreamTest extends BaseS3Test
outputStream.write("\n]\n".getBytes(StandardCharsets.UTF_8));
outputStream.close();
S3UploadOutputStream s3UploadOutputStream = new S3UploadOutputStream(getS3Utils().getAmazonS3(), bucketName, key);
S3UploadOutputStream s3UploadOutputStream = new S3UploadOutputStream(getS3Utils().getAmazonS3(), bucketName, key, null);
s3UploadOutputStream.write(outputStream.toByteArray(), 0, 5 * 1024 * 1024);
s3UploadOutputStream.write(outputStream.toByteArray(), 0, 3 * 1024 * 1024);
s3UploadOutputStream.write(outputStream.toByteArray(), 0, 3 * 1024 * 1024);
s3UploadOutputStream.close();
}
}
}