diff --git a/src/main/java/io/kestra/plugin/aws/runner/Batch.java b/src/main/java/io/kestra/plugin/aws/runner/Batch.java index f6d0f43..d71f947 100644 --- a/src/main/java/io/kestra/plugin/aws/runner/Batch.java +++ b/src/main/java/io/kestra/plugin/aws/runner/Batch.java @@ -226,14 +226,16 @@ public class Batch extends TaskRunner implements AbstractS3, AbstractConnectionI private final Duration completionCheckInterval = Duration.ofSeconds(5); @Override - public RunnerResult run(RunContext runContext, TaskCommands taskCommands, List filesToUpload, List filesToDownload) throws Exception { + public RunnerResult run(RunContext runContext, TaskCommands taskCommands, List filesToDownload) throws Exception { boolean hasS3Bucket = this.bucket != null; String renderedBucket = runContext.render(bucket); - boolean hasFilesToUpload = !ListUtils.isEmpty(filesToUpload); + Logger logger = runContext.logger(); + List relativeWorkingDirectoryFilesPaths = taskCommands.relativeWorkingDirectoryFilesPaths(); + boolean hasFilesToUpload = !ListUtils.isEmpty(relativeWorkingDirectoryFilesPaths); if (hasFilesToUpload && !hasS3Bucket) { - throw new IllegalArgumentException("You must provide an S3 bucket in order to use `inputFiles` or `namespaceFiles`"); + logger.warn("Working directory is not empty but no S3 bucket are specified. You must provide an S3 bucket in order to use `inputFiles` or `namespaceFiles`. Skipping importing files to runner."); } boolean hasFilesToDownload = !ListUtils.isEmpty(filesToDownload); boolean outputDirectoryEnabled = taskCommands.outputDirectoryEnabled(); @@ -241,7 +243,6 @@ public RunnerResult run(RunContext runContext, TaskCommands taskCommands, List + relativeWorkingDirectoryFilesPaths.stream().map(relativePath -> UploadFileRequest.builder() .putObjectRequest( PutObjectRequest @@ -295,7 +296,7 @@ public RunnerResult run(RunContext runContext, TaskCommands taskCommands, List commands = ListUtils.emptyOnNull(filesToUpload).stream() + Stream commands = ListUtils.emptyOnNull(relativeWorkingDirectoryFilesPaths).stream() .map(relativePath -> "aws s3 cp " + s3WorkingDir + Path.of("/" + relativePath) + " " + batchWorkingDirectory + Path.of("/" + relativePath));