Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update backend type #1376

Merged
merged 5 commits into from
Jun 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,15 @@
## [v3.4.2.dev0]

### Added
-
- [Singularity] Added new singularity compute backend

### Changed
-

### Fixed
-


## [v3.4.1]

### Added
Expand Down
24 changes: 0 additions & 24 deletions lithops/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,27 +120,3 @@
'azure_vms',
'vm'
]

FAAS_BACKENDS = [
'ibm_cf',
'knative',
'openwhisk',
'aws_lambda',
'gcp_cloudrun',
'gcp_functions',
'cloudrun',
'azure_functions',
'azure_containers',
'aliyun_fc',
'oracle_f'
]

BATCH_BACKENDS = [
'ibm_vpc',
'aws_ec2',
'azure_vms',
'aws_batch',
'k8s',
'code_engine'
'vm'
]
5 changes: 4 additions & 1 deletion lithops/executors.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,8 @@ class FunctionExecutor:
:param backend: Compute backend to run the functions
:param storage: Storage backend to store Lithops data
:param monitoring: Monitoring system implementation. One of: storage, rabbitmq
:param log_level: Log level printing (INFO, DEBUG, ...). Set it to None to hide all logs. If this is param is set, all logging params in config are disabled
:param log_level: Log level printing (INFO, DEBUG, ...). Set it to None to hide all logs.
If this param is set, all logging params in config are disabled
:param kwargs: Any parameter that can be set in the compute backend section of the config file, can be set here
"""

Expand Down Expand Up @@ -129,6 +130,8 @@ def __init__(
standalone_config = extract_standalone_config(self.config)
self.compute_handler = StandaloneHandler(standalone_config)

self.config['lithops']['backend_type'] = self.compute_handler.get_backend_type()

# Create the monitoring system
self.job_monitor = JobMonitor(
executor_id=self.executor_id,
Expand Down
20 changes: 16 additions & 4 deletions lithops/invokers.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,20 @@
from lithops.future import ResponseFuture
from lithops.config import extract_storage_config
from lithops.version import __version__
from lithops.utils import verify_runtime_name, version_str, is_lithops_worker, iterchunks
from lithops.constants import LOGGER_LEVEL, LOGS_DIR, SERVERLESS, SA_INSTALL_DIR, STANDALONE_BACKENDS
from lithops.utils import (
verify_runtime_name,
version_str,
is_lithops_worker,
iterchunks,
BackendType
)
from lithops.constants import (
LOGGER_LEVEL,
LOGS_DIR,
SERVERLESS,
SA_INSTALL_DIR,
STANDALONE_BACKENDS
)
from lithops.util.metrics import PrometheusExporter

logger = logging.getLogger(__name__)
Expand All @@ -39,7 +51,7 @@ def create_invoker(config, executor_id, internal_storage,
"""
Creates the appropriate invoker based on the backend type
"""
if compute_handler.get_backend_type() == 'batch':
if compute_handler.get_backend_type() == BackendType.BATCH.value:
return BatchInvoker(
config,
executor_id,
Expand All @@ -48,7 +60,7 @@ def create_invoker(config, executor_id, internal_storage,
job_monitor
)

elif compute_handler.get_backend_type() == 'faas':
elif compute_handler.get_backend_type() == BackendType.FAAS.value:
return FaaSInvoker(
config,
executor_id,
Expand Down
4 changes: 2 additions & 2 deletions lithops/job/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
create_job_key, func_key_suffix
from lithops.job.serialize import SerializeIndependent, create_module_data
from lithops.constants import MAX_AGG_DATA_SIZE, LOCALHOST, \
SERVERLESS, STANDALONE, CUSTOM_RUNTIME_DIR, FAAS_BACKENDS
SERVERLESS, STANDALONE, CUSTOM_RUNTIME_DIR


logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -297,7 +297,7 @@ def _create_job(
host_job_meta['host_func_upload_time'] = 0

# upload data
if upload_data or backend not in FAAS_BACKENDS:
if upload_data or config['lithops']['backend_type'] == utils.BackendType.BATCH.value:
# Upload iterdata to COS only if a single element is greater than MAX_DATA_IN_PAYLOAD
logger.debug('ExecutorID {} | JobID {} - Uploading data to the storage backend'
.format(executor_id, job_id))
Expand Down