CE-1955 - Boosting test-coverage during bulk-load rollout

2024-11-25 11:27:44 -06:00
parent c883749ba9
commit 1c2638a5c4
7 changed files with 186 additions and 186 deletions

QInstanceEnricher.java

@@ -73,13 +73,13 @@ import com.kingsrook.qqq.backend.core.processes.implementations.bulk.delete.Bulk
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.delete.BulkDeleteTransformStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.edit.BulkEditLoadStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.edit.BulkEditTransformStep;
+import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertExtractStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertLoadStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertPrepareFileMappingStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertPrepareValueMappingStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertReceiveFileMappingStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertReceiveValueMappingStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertTransformStep;
-import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.BulkInsertV2ExtractStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwithfrontend.ExtractViaQueryStep;
 import com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwithfrontend.StreamedETLWithFrontendProcess;
 import com.kingsrook.qqq.backend.core.scheduler.QScheduleManager;

@@ -819,7 +819,7 @@ public class QInstanceEnricher
       values.put(StreamedETLWithFrontendProcess.FIELD_DESTINATION_TABLE, table.getName());

       QProcessMetaData process = StreamedETLWithFrontendProcess.defineProcessMetaData(
-         BulkInsertV2ExtractStep.class,
+         BulkInsertExtractStep.class,
          BulkInsertTransformStep.class,
          BulkInsertLoadStep.class,
          values
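
Note: this hunk swaps the bulk-insert process over to the rewritten extract step. For orientation, here is a minimal sketch of the wiring that results; the step classes and the `defineProcessMetaData` call come straight from the hunk above, while the wrapper method name and the `Map<String, Serializable>` shape of `values` are assumptions.

```java
// Sketch only - the wrapper name and the values parameter type are assumed,
// not the enricher's actual code.
private QProcessMetaData defineBulkInsertProcess(Map<String, Serializable> values)
{
   return StreamedETLWithFrontendProcess.defineProcessMetaData(
      BulkInsertExtractStep.class,   // extract (was BulkInsertV2ExtractStep before this commit)
      BulkInsertTransformStep.class, // transform/validate
      BulkInsertLoadStep.class,      // load/insert
      values);                       // e.g. FIELD_DESTINATION_TABLE -> table.getName()
}
```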

BulkInsertExtractStep.java

@@ -22,84 +22,106 @@
 package com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert;


+import java.io.InputStream;
 import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Optional;
-import com.kingsrook.qqq.backend.core.adapters.CsvToQRecordAdapter;
-import com.kingsrook.qqq.backend.core.context.QContext;
+import java.util.Objects;
+import com.kingsrook.qqq.backend.core.actions.tables.StorageAction;
 import com.kingsrook.qqq.backend.core.exceptions.QException;
-import com.kingsrook.qqq.backend.core.exceptions.QUserFacingException;
-import com.kingsrook.qqq.backend.core.model.actions.processes.QUploadedFile;
 import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepInput;
 import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepOutput;
-import com.kingsrook.qqq.backend.core.model.actions.shared.mapping.QKeyBasedFieldMapping;
-import com.kingsrook.qqq.backend.core.model.metadata.fields.QFieldMetaData;
-import com.kingsrook.qqq.backend.core.model.metadata.tables.QTableMetaData;
+import com.kingsrook.qqq.backend.core.model.actions.tables.storage.StorageInput;
+import com.kingsrook.qqq.backend.core.model.data.QRecord;
+import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.filehandling.FileToRowsInterface;
+import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.mapping.RowsToRecordInterface;
+import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.model.BulkInsertMapping;
+import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.model.BulkLoadFileRow;
 import com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwithfrontend.AbstractExtractStep;
-import com.kingsrook.qqq.backend.core.state.AbstractStateKey;
-import com.kingsrook.qqq.backend.core.state.TempFileStateProvider;


 /*******************************************************************************
 ** Extract step for generic table bulk-insert ETL process
+**
+** This step does a little bit of transforming, actually - taking rows from
+** an uploaded file, and potentially merging them (for child-table use-cases)
+** and applying the "Mapping" - to put fully built records into the pipe for the
+** Transform step.
 *******************************************************************************/
 public class BulkInsertExtractStep extends AbstractExtractStep
 {
+   /***************************************************************************
+   **
+   ***************************************************************************/
    @Override
    public void run(RunBackendStepInput runBackendStepInput, RunBackendStepOutput runBackendStepOutput) throws QException
    {
-      AbstractStateKey stateKey = (AbstractStateKey) runBackendStepInput.getValue(QUploadedFile.DEFAULT_UPLOADED_FILE_FIELD_NAME);
-      Optional<QUploadedFile> optionalUploadedFile = TempFileStateProvider.getInstance().get(QUploadedFile.class, stateKey);
-      if(optionalUploadedFile.isEmpty())
-      {
-         throw (new QException("Could not find uploaded file"));
-      }
-
-      byte[] bytes = optionalUploadedFile.get().getBytes();
-      String fileName = optionalUploadedFile.get().getFilename();
-
-      /////////////////////////////////////////////////////
-      // let the user specify field labels instead names //
-      /////////////////////////////////////////////////////
-      QTableMetaData table = runBackendStepInput.getTable();
-      String tableName = runBackendStepInput.getTableName();
-      QKeyBasedFieldMapping mapping = new QKeyBasedFieldMapping();
-      for(Map.Entry<String, QFieldMetaData> entry : table.getFields().entrySet())
-      {
-         mapping.addMapping(entry.getKey(), entry.getValue().getLabel());
-      }
-
-      //////////////////////////////////////////////////////////////////////////
-      // get the non-editable fields - they'll be blanked out in a customizer //
-      //////////////////////////////////////////////////////////////////////////
-      List<QFieldMetaData> nonEditableFields = table.getFields().values().stream()
-         .filter(f -> !f.getIsEditable())
-         .toList();
-
-      if(fileName.toLowerCase(Locale.ROOT).endsWith(".csv"))
-      {
-         new CsvToQRecordAdapter().buildRecordsFromCsv(new CsvToQRecordAdapter.InputWrapper()
-            .withRecordPipe(getRecordPipe())
-            .withLimit(getLimit())
-            .withCsv(new String(bytes))
-            .withDoCorrectValueTypes(true)
-            .withTable(QContext.getQInstance().getTable(tableName))
-            .withMapping(mapping)
-            .withRecordCustomizer((record) ->
-            {
-               ////////////////////////////////////////////
-               // remove values from non-editable fields //
-               ////////////////////////////////////////////
-               for(QFieldMetaData nonEditableField : nonEditableFields)
-               {
-                  record.setValue(nonEditableField.getName(), null);
-               }
-            }));
-      }
-      else
-      {
-         throw (new QUserFacingException("Unsupported file type."));
-      }
+      int rowsAdded = 0;
+      int originalLimit = Objects.requireNonNullElse(getLimit(), Integer.MAX_VALUE);
+
+      StorageInput storageInput = BulkInsertStepUtils.getStorageInputForTheFile(runBackendStepInput);
+      BulkInsertMapping bulkInsertMapping = (BulkInsertMapping) runBackendStepOutput.getValue("bulkInsertMapping");
+      RowsToRecordInterface rowsToRecord = bulkInsertMapping.getLayout().newRowsToRecordInterface();
+
+      try
+         (
+            ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+            // open a stream to read from our file, and a FileToRows object, that knows how to read from that stream //
+            ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+            InputStream inputStream = new StorageAction().getInputStream(storageInput);
+            FileToRowsInterface fileToRowsInterface = FileToRowsInterface.forFile(storageInput.getReference(), inputStream);
+         )
+      {
+         ///////////////////////////////////////////////////////////
+         // read the header row (if this file & mapping uses one) //
+         ///////////////////////////////////////////////////////////
+         BulkLoadFileRow headerRow = bulkInsertMapping.getHasHeaderRow() ? fileToRowsInterface.next() : null;
+
+         ////////////////////////////////////////////////////////////////////////////////////////////////////////
+         // while there are more rows in the file - and we're under the limit - get more records from the file //
+         ////////////////////////////////////////////////////////////////////////////////////////////////////////
+         while(fileToRowsInterface.hasNext() && rowsAdded < originalLimit)
+         {
+            int remainingLimit = originalLimit - rowsAdded;
+
+            ////////////////////////////////////////////////////////////////////////////////////////////////////////////
+            // put a page-size limit on the rows-to-record class, so it won't be tempted to do whole file all at once //
+            ////////////////////////////////////////////////////////////////////////////////////////////////////////////
+            int pageLimit = Math.min(remainingLimit, getMaxPageSize());
+
+            List<QRecord> page = rowsToRecord.nextPage(fileToRowsInterface, headerRow, bulkInsertMapping, pageLimit);
+            if(page.size() > remainingLimit)
+            {
+               /////////////////////////////////////////////////////////////
+               // in case we got back more than we asked for, sub-list it //
+               /////////////////////////////////////////////////////////////
+               page = page.subList(0, remainingLimit);
+            }
+
+            /////////////////////////////////////////////
+            // send this page of records into the pipe //
+            /////////////////////////////////////////////
+            getRecordPipe().addRecords(page);
+            rowsAdded += page.size();
+         }
+      }
+      catch(QException qe)
+      {
+         throw qe;
+      }
+      catch(Exception e)
+      {
+         throw new QException("Unhandled error in bulk insert extract step", e);
+      }
    }
+
+
+
+   /***************************************************************************
+   **
+   ***************************************************************************/
+   private int getMaxPageSize()
+   {
+      return (1000);
+   }
 }
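
The heart of the rewritten step is its bounded paging loop. The stand-alone sketch below restates that loop with plain integers in place of `QRecord`s and a `List` in place of the record pipe (every name here is illustrative, not framework API). It shows the three invariants the step maintains: never add more than the overall limit, never ask for more than a page per call, and trim any over-fetch before it reaches the sink. As in the step, a null limit degrades to `Integer.MAX_VALUE`.

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class PagingSketch
{
   static final int MAX_PAGE_SIZE = 1000; // mirrors getMaxPageSize() above

   public static List<Integer> drain(Iterator<Integer> source, Integer limit)
   {
      int originalLimit = (limit == null) ? Integer.MAX_VALUE : limit;
      int rowsAdded = 0;
      List<Integer> sink = new ArrayList<>(); // stands in for getRecordPipe()

      while(source.hasNext() && rowsAdded < originalLimit)
      {
         int remainingLimit = originalLimit - rowsAdded;
         int pageLimit = Math.min(remainingLimit, MAX_PAGE_SIZE);

         List<Integer> page = nextPage(source, pageLimit);
         if(page.size() > remainingLimit)
         {
            page = page.subList(0, remainingLimit); // defensive trim, as in the step
         }

         sink.addAll(page);
         rowsAdded += page.size();
      }

      return sink;
   }

   // stands in for RowsToRecordInterface.nextPage(...)
   private static List<Integer> nextPage(Iterator<Integer> source, int pageLimit)
   {
      List<Integer> page = new ArrayList<>();
      while(source.hasNext() && page.size() < pageLimit)
      {
         page.add(source.next());
      }
      return page;
   }

   public static void main(String[] args)
   {
      System.out.println(drain(List.of(1, 2, 3, 4, 5).iterator(), 3)); // [1, 2, 3]
   }
}
```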

BulkInsertV2ExtractStep.java (deleted)

@@ -1,127 +0,0 @@
/*
* QQQ - Low-code Application Framework for Engineers.
* Copyright (C) 2021-2022. Kingsrook, LLC
* 651 N Broad St Ste 205 # 6917 | Middletown DE 19709 | United States
* contact@kingsrook.com
* https://github.com/Kingsrook/
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert;
import java.io.InputStream;
import java.util.List;
import java.util.Objects;
import com.kingsrook.qqq.backend.core.actions.tables.StorageAction;
import com.kingsrook.qqq.backend.core.exceptions.QException;
import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepInput;
import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepOutput;
import com.kingsrook.qqq.backend.core.model.actions.tables.storage.StorageInput;
import com.kingsrook.qqq.backend.core.model.data.QRecord;
import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.filehandling.FileToRowsInterface;
import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.mapping.RowsToRecordInterface;
import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.model.BulkInsertMapping;
import com.kingsrook.qqq.backend.core.processes.implementations.bulk.insert.model.BulkLoadFileRow;
import com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwithfrontend.AbstractExtractStep;
/*******************************************************************************
** Extract step for generic table bulk-insert ETL process
**
** This step does a little bit of transforming, actually - taking rows from
** an uploaded file, and potentially merging them (for child-table use-cases)
** and applying the "Mapping" - to put fully built records into the pipe for the
** Transform step.
*******************************************************************************/
public class BulkInsertV2ExtractStep extends AbstractExtractStep
{
/***************************************************************************
**
***************************************************************************/
@Override
public void run(RunBackendStepInput runBackendStepInput, RunBackendStepOutput runBackendStepOutput) throws QException
{
int rowsAdded = 0;
int originalLimit = Objects.requireNonNullElse(getLimit(), Integer.MAX_VALUE);
StorageInput storageInput = BulkInsertStepUtils.getStorageInputForTheFile(runBackendStepInput);
BulkInsertMapping bulkInsertMapping = (BulkInsertMapping) runBackendStepOutput.getValue("bulkInsertMapping");
RowsToRecordInterface rowsToRecord = bulkInsertMapping.getLayout().newRowsToRecordInterface();
try
(
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// open a stream to read from our file, and a FileToRows object, that knows how to read from that stream //
///////////////////////////////////////////////////////////////////////////////////////////////////////////
InputStream inputStream = new StorageAction().getInputStream(storageInput);
FileToRowsInterface fileToRowsInterface = FileToRowsInterface.forFile(storageInput.getReference(), inputStream);
)
{
///////////////////////////////////////////////////////////
// read the header row (if this file & mapping uses one) //
///////////////////////////////////////////////////////////
BulkLoadFileRow headerRow = bulkInsertMapping.getHasHeaderRow() ? fileToRowsInterface.next() : null;
////////////////////////////////////////////////////////////////////////////////////////////////////////
// while there are more rows in the file - and we're under the limit - get more records form the file //
////////////////////////////////////////////////////////////////////////////////////////////////////////
while(fileToRowsInterface.hasNext() && rowsAdded < originalLimit)
{
int remainingLimit = originalLimit - rowsAdded;
////////////////////////////////////////////////////////////////////////////////////////////////////////////
// put a page-size limit on the rows-to-record class, so it won't be tempted to do whole file all at once //
////////////////////////////////////////////////////////////////////////////////////////////////////////////
int pageLimit = Math.min(remainingLimit, getMaxPageSize());
List<QRecord> page = rowsToRecord.nextPage(fileToRowsInterface, headerRow, bulkInsertMapping, pageLimit);
if(page.size() > remainingLimit)
{
/////////////////////////////////////////////////////////////
// in case we got back more than we asked for, sub-list it //
/////////////////////////////////////////////////////////////
page = page.subList(0, remainingLimit);
}
/////////////////////////////////////////////
// send this page of records into the pipe //
/////////////////////////////////////////////
getRecordPipe().addRecords(page);
rowsAdded += page.size();
}
}
catch(QException qe)
{
throw qe;
}
catch(Exception e)
{
throw new QException("Unhandled error in bulk insert extract step", e);
}
}
/***************************************************************************
**
***************************************************************************/
private int getMaxPageSize()
{
return (1000);
}
}

NoopLoadStep.java

@@ -23,6 +23,7 @@ package com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwit

 import com.kingsrook.qqq.backend.core.exceptions.QException;
+import com.kingsrook.qqq.backend.core.logging.QLogger;
 import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepInput;
 import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepOutput;

@@ -33,6 +34,7 @@ import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepOutp
 *******************************************************************************/
 public class NoopLoadStep extends AbstractLoadStep
 {
+   private static final QLogger LOG = QLogger.getLogger(NoopLoadStep.class);

    /*******************************************************************************

@@ -45,6 +47,7 @@ public class NoopLoadStep extends AbstractLoadStep
       ///////////
       // noop. //
       ///////////
+      LOG.trace("noop");
    }
 }

BulkInsertFullProcessTest.java

@@ -188,7 +188,7 @@ class BulkInsertFullProcessTest extends BaseTest
          new BulkLoadProfileField().withFieldName("firstName").withColumnIndex(3),
          new BulkLoadProfileField().withFieldName("lastName").withColumnIndex(4),
          new BulkLoadProfileField().withFieldName("email").withDefaultValue(defaultEmail),
-         new BulkLoadProfileField().withFieldName("homeStateId").withColumnIndex(7).withDoValueMapping(true),
+         new BulkLoadProfileField().withFieldName("homeStateId").withColumnIndex(7).withDoValueMapping(true).withValueMappings(Map.of("Illinois", 1)),
          new BulkLoadProfileField().withFieldName("noOfShoes").withColumnIndex(8)
       )));
    };

@@ -204,6 +204,7 @@ class BulkInsertFullProcessTest extends BaseTest
       assertEquals("homeStateId", ((QFrontendFieldMetaData) valueMappingField).getName());
       assertEquals(List.of("Missouri", "Illinois"), runProcessOutput.getValue("fileValues"));
       assertEquals(List.of("homeStateId"), runProcessOutput.getValue("fieldNamesToDoValueMapping"));
+      assertEquals(Map.of(1, "IL"), runProcessOutput.getValue("mappedValueLabels"));
       assertEquals(0, runProcessOutput.getValue("valueMappingFieldIndex"));
       assertThat(runProcessOutput.getProcessState().getNextStepName()).isPresent().get().isEqualTo("valueMapping");
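
The tightened profile above supplies an explicit value mapping ("Illinois" -> 1), and the new assertion pins the id-to-label map the process reports back (1 -> "IL"). A rough sketch of the translation idea being exercised, with invented names (this is not the framework's mapping code):

```java
import java.io.Serializable;
import java.util.Map;

public class ValueMappingSketch
{
   // raw file value -> stored id; unmapped values pass through for the user to map
   public static Serializable mapValue(Map<String, Serializable> valueMappings, String fileValue)
   {
      return valueMappings.getOrDefault(fileValue, fileValue);
   }

   public static void main(String[] args)
   {
      Map<String, Serializable> homeStateIdMappings = Map.<String, Serializable>of("Illinois", 1);
      System.out.println(mapValue(homeStateIdMappings, "Illinois")); // 1
      System.out.println(mapValue(homeStateIdMappings, "Missouri")); // Missouri (unmapped)
   }
}
```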

NoopLoadStepTest.java (new file)

@@ -0,0 +1,50 @@
/*
* QQQ - Low-code Application Framework for Engineers.
* Copyright (C) 2021-2024. Kingsrook, LLC
* 651 N Broad St Ste 205 # 6917 | Middletown DE 19709 | United States
* contact@kingsrook.com
* https://github.com/Kingsrook/
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package com.kingsrook.qqq.backend.core.processes.implementations.etl.streamedwithfrontend;
import com.kingsrook.qqq.backend.core.BaseTest;
import com.kingsrook.qqq.backend.core.exceptions.QException;
import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepInput;
import com.kingsrook.qqq.backend.core.model.actions.processes.RunBackendStepOutput;
import org.junit.jupiter.api.Test;
/*******************************************************************************
** Unit test for NoopLoadStep
*******************************************************************************/
class NoopLoadStepTest extends BaseTest
{
/*******************************************************************************
**
*******************************************************************************/
@Test
void test() throws QException
{
//////////////////////////////////////
// sorry, just here for coverage... //
//////////////////////////////////////
new NoopLoadStep().runOnePage(new RunBackendStepInput(), new RunBackendStepOutput());
}
}

AggregatesTest.java

@@ -91,6 +91,57 @@ class AggregatesTest extends BaseTest

+   /*******************************************************************************
+   **
+   *******************************************************************************/
+   @Test
+   void testLong()
+   {
+      LongAggregates aggregates = new LongAggregates();
+      assertEquals(0, aggregates.getCount());
+      assertNull(aggregates.getMin());
+      assertNull(aggregates.getMax());
+      assertNull(aggregates.getSum());
+      assertNull(aggregates.getAverage());
+
+      aggregates.add(5L);
+      assertEquals(1, aggregates.getCount());
+      assertEquals(5, aggregates.getMin());
+      assertEquals(5, aggregates.getMax());
+      assertEquals(5, aggregates.getSum());
+      assertThat(aggregates.getAverage()).isCloseTo(new BigDecimal("5"), Offset.offset(BigDecimal.ZERO));
+
+      aggregates.add(10L);
+      assertEquals(2, aggregates.getCount());
+      assertEquals(5, aggregates.getMin());
+      assertEquals(10, aggregates.getMax());
+      assertEquals(15, aggregates.getSum());
+      assertThat(aggregates.getAverage()).isCloseTo(new BigDecimal("7.5"), Offset.offset(BigDecimal.ZERO));
+
+      aggregates.add(15L);
+      assertEquals(3, aggregates.getCount());
+      assertEquals(5, aggregates.getMin());
+      assertEquals(15, aggregates.getMax());
+      assertEquals(30, aggregates.getSum());
+      assertThat(aggregates.getAverage()).isCloseTo(new BigDecimal("10"), Offset.offset(BigDecimal.ZERO));
+
+      aggregates.add(null);
+      assertEquals(3, aggregates.getCount());
+      assertEquals(5, aggregates.getMin());
+      assertEquals(15, aggregates.getMax());
+      assertEquals(30, aggregates.getSum());
+      assertThat(aggregates.getAverage()).isCloseTo(new BigDecimal("10"), Offset.offset(BigDecimal.ZERO));
+
+      assertEquals(new BigDecimal("750"), aggregates.getProduct());
+      assertEquals(new BigDecimal("25.0000"), aggregates.getVariance());
+      assertEquals(new BigDecimal("5.0000"), aggregates.getStandardDeviation());
+      assertThat(aggregates.getVarP()).isCloseTo(new BigDecimal("16.6667"), Offset.offset(new BigDecimal(".0001")));
+      assertThat(aggregates.getStdDevP()).isCloseTo(new BigDecimal("4.0824"), Offset.offset(new BigDecimal(".0001")));
+   }

    /*******************************************************************************
    **
    *******************************************************************************/
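
For the record, the constants asserted in `testLong()` check out for the sample {5, 10, 15} (the trailing `add(null)` is ignored): product = 5 * 10 * 15 = 750; mean = 10; sum of squared deviations = 25 + 0 + 25 = 50; sample variance = 50 / 2 = 25, so stddev = 5; population variance = 50 / 3 ≈ 16.6667, so stdDevP ≈ 4.0824. A throwaway re-derivation in doubles:

```java
public class LongAggregatesExpectations
{
   public static void main(String[] args)
   {
      double[] values = {5, 10, 15};

      double sum = 0;
      double product = 1;
      for(double v : values)
      {
         sum += v;
         product *= v;
      }
      double mean = sum / values.length; // 10.0

      double sumSquaredDeviations = 0;
      for(double v : values)
      {
         sumSquaredDeviations += (v - mean) * (v - mean);
      }

      System.out.println(product);                                         // 750.0
      System.out.println(sumSquaredDeviations / (values.length - 1));      // 25.0    (variance; stddev = 5.0)
      System.out.println(sumSquaredDeviations / values.length);            // 16.666… (varP)
      System.out.println(Math.sqrt(sumSquaredDeviations / values.length)); // 4.0824… (stdDevP)
   }
}
```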