Merge pull request #1376 from JosepSampe/lithops-dev
Update backend type
JosepSampe authored Jun 12, 2024
2 parents c2fbba1 + 4be7b4b commit 9973ac4
Showing 5 changed files with 24 additions and 32 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -3,14 +3,15 @@
## [v3.4.2.dev0]

### Added
-
- [Singularity] Added new singularity compute backend

### Changed
-

### Fixed
-


## [v3.4.1]

### Added
24 changes: 0 additions & 24 deletions lithops/constants.py
@@ -120,27 +120,3 @@
'azure_vms',
'vm'
]

FAAS_BACKENDS = [
'ibm_cf',
'knative',
'openwhisk',
'aws_lambda',
'gcp_cloudrun',
'gcp_functions',
'cloudrun',
'azure_functions',
'azure_containers',
'aliyun_fc',
'oracle_f'
]

BATCH_BACKENDS = [
'ibm_vpc',
'aws_ec2',
'azure_vms',
'aws_batch',
'k8s',
'code_engine'
'vm'
]
5 changes: 4 additions & 1 deletion lithops/executors.py
@@ -61,7 +61,8 @@ class FunctionExecutor:
:param backend: Compute backend to run the functions
:param storage: Storage backend to store Lithops data
:param monitoring: Monitoring system implementation. One of: storage, rabbitmq
:param log_level: Log level printing (INFO, DEBUG, ...). Set it to None to hide all logs. If this is param is set, all logging params in config are disabled
:param log_level: Log level printing (INFO, DEBUG, ...). Set it to None to hide all logs.
If this is param is set, all logging params in config are disabled
:param kwargs: Any parameter that can be set in the compute backend section of the config file, can be set here
"""

@@ -129,6 +130,8 @@ def __init__(
standalone_config = extract_standalone_config(self.config)
self.compute_handler = StandaloneHandler(standalone_config)

self.config['lithops']['backend_type'] = self.compute_handler.get_backend_type()

# Create the monitoring system
self.job_monitor = JobMonitor(
executor_id=self.executor_id,
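Note (illustration, not part of this changeset): the new assignment publishes the compute handler's backend type into the shared Lithops config, so later stages can branch on a single flag instead of importing per-backend name lists. A minimal sketch of the idea, using a stand-in handler class and the 'faas' string value inferred from the comparisons later in this diff:

# Stand-in sketch: a compute handler reports its backend type and the
# executor records it in the shared config for downstream consumers.
class FakeServerlessHandler:
    def get_backend_type(self):
        # Assumed return value; the real handler presumably returns
        # BackendType.FAAS.value from lithops.utils.
        return 'faas'

config = {'lithops': {}}
compute_handler = FakeServerlessHandler()
config['lithops']['backend_type'] = compute_handler.get_backend_type()
print(config['lithops']['backend_type'])  # -> 'faas'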
20 changes: 16 additions & 4 deletions lithops/invokers.py
@@ -27,8 +27,20 @@
from lithops.future import ResponseFuture
from lithops.config import extract_storage_config
from lithops.version import __version__
from lithops.utils import verify_runtime_name, version_str, is_lithops_worker, iterchunks
from lithops.constants import LOGGER_LEVEL, LOGS_DIR, SERVERLESS, SA_INSTALL_DIR, STANDALONE_BACKENDS
from lithops.utils import (
verify_runtime_name,
version_str,
is_lithops_worker,
iterchunks,
BackendType
)
from lithops.constants import (
LOGGER_LEVEL,
LOGS_DIR,
SERVERLESS,
SA_INSTALL_DIR,
STANDALONE_BACKENDS
)
from lithops.util.metrics import PrometheusExporter

logger = logging.getLogger(__name__)
@@ -39,7 +51,7 @@ def create_invoker(config, executor_id, internal_storage,
"""
Creates the appropriate invoker based on the backend type
"""
if compute_handler.get_backend_type() == 'batch':
if compute_handler.get_backend_type() == BackendType.BATCH.value:
return BatchInvoker(
config,
executor_id,
@@ -48,7 +60,7 @@
job_monitor
)

elif compute_handler.get_backend_type() == 'faas':
elif compute_handler.get_backend_type() == BackendType.FAAS.value:
return FaaSInvoker(
config,
executor_id,
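Note (illustration, not part of this changeset): the hunks above import BackendType from lithops.utils, but its definition is not shown in this diff. A plausible sketch of the enum and of the create_invoker dispatch, where the member values 'faas' and 'batch' are an assumption inferred from the string comparisons they replace:

from enum import Enum

# Assumed shape of lithops.utils.BackendType; only the members used in this
# diff are sketched, and their string values are inferred, not confirmed.
class BackendType(Enum):
    FAAS = 'faas'
    BATCH = 'batch'

def pick_invoker(backend_type):
    # Simplified mirror of the branching in create_invoker shown above.
    if backend_type == BackendType.BATCH.value:
        return 'BatchInvoker'
    elif backend_type == BackendType.FAAS.value:
        return 'FaaSInvoker'
    raise ValueError(f'Unknown backend type: {backend_type}')

print(pick_invoker('batch'))  # -> BatchInvoker
print(pick_invoker('faas'))   # -> FaaSInvoker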
4 changes: 2 additions & 2 deletions lithops/job/job.py
@@ -30,7 +30,7 @@
create_job_key, func_key_suffix
from lithops.job.serialize import SerializeIndependent, create_module_data
from lithops.constants import MAX_AGG_DATA_SIZE, LOCALHOST, \
SERVERLESS, STANDALONE, CUSTOM_RUNTIME_DIR, FAAS_BACKENDS
SERVERLESS, STANDALONE, CUSTOM_RUNTIME_DIR


logger = logging.getLogger(__name__)
@@ -297,7 +297,7 @@ def _create_job(
host_job_meta['host_func_upload_time'] = 0

# upload data
if upload_data or backend not in FAAS_BACKENDS:
if upload_data or config['lithops']['backend_type'] == utils.BackendType.BATCH.value:
# Upload iterdata to COS only if a single element is greater than MAX_DATA_IN_PAYLOAD
logger.debug('ExecutorID {} | JobID {} - Uploading data to the storage backend'
.format(executor_id, job_id))
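Note (illustration, not part of this changeset): taken together, FunctionExecutor now records the backend type once and _create_job reads it back, replacing the old membership test against FAAS_BACKENDS. A short sketch of the new upload decision, again assuming the 'batch'/'faas' string values:

# Sketch of the condition in _create_job: batch backends always upload
# iterdata to the storage backend, while FaaS backends upload only when
# upload_data is explicitly requested.
def must_upload_data(upload_data, config):
    return upload_data or config['lithops']['backend_type'] == 'batch'

cfg = {'lithops': {'backend_type': 'batch'}}
print(must_upload_data(False, cfg))  # -> True
cfg['lithops']['backend_type'] = 'faas'
print(must_upload_data(False, cfg))  # -> False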
